/* SPDX-License-Identifier: GPL-2.0-or-later */

#include <console/console.h>
#include <soc/meminit.h>
#include <string.h>

#define LP4X_CH_WIDTH		16
#define LP4X_CHANNELS		CHANNEL_COUNT(LP4X_CH_WIDTH)

#define DDR4_CH_WIDTH		64
#define DDR4_CHANNELS		CHANNEL_COUNT(DDR4_CH_WIDTH)
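/*
 * Per-technology SoC memory configuration, indexed by the mainboard's memory
 * type. CHANNEL_COUNT() (assumed to be provided by <soc/meminit.h>) derives
 * the number of physical channels from the per-channel data width, so LP4x
 * (x16 channels) exposes more, narrower channels than DDR4 (x64 channels).
 */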
static const struct soc_mem_cfg soc_mem_cfg[] = {
	[MEM_TYPE_DDR4] = {
		.num_phys_channels = DDR4_CHANNELS,
		.phys_to_mrc_map = {
			[0] = 0,
			[1] = 4,
		},
		.md_phy_masks = {
			/*
			 * Only physical channel 0 is populated in case of half-populated
			 * configuration.
			 */
			.half_channel = BIT(0),
			/* In mixed topologies, channel 0 is always memory-down. */
			.mixed_topo = BIT(0),
		},
	},
	[MEM_TYPE_LP4X] = {
		.num_phys_channels = LP4X_CHANNELS,
		.phys_to_mrc_map = {
			[0] = 0,
			[1] = 1,
			[2] = 2,
			[3] = 3,
			[4] = 4,
			[5] = 5,
			[6] = 6,
			[7] = 7,
		},
		.md_phy_masks = {
			/*
			 * Physical channels 0, 1, 2 and 3 are populated in case of
			 * half-populated configurations.
			 */
			.half_channel = BIT(0) | BIT(1) | BIT(2) | BIT(3),
			/* LP4x does not support mixed topologies. */
		},
	},
};
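/*
 * Fill the per-channel, per-DIMM SPD pointer UPDs from the channel data
 * gathered by mem_populate_channel_data() and mark DIMM slots without SPD
 * data as disabled.
 */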
static void mem_init_spd_upds(FSP_M_CONFIG *mem_cfg, const struct mem_channel_data *data)
{
	uint32_t *spd_upds[MRC_CHANNELS][CONFIG_DIMMS_PER_CHANNEL] = {
		[0] = { &mem_cfg->MemorySpdPtr000, &mem_cfg->MemorySpdPtr001, },
		[1] = { &mem_cfg->MemorySpdPtr010, &mem_cfg->MemorySpdPtr011, },
		[2] = { &mem_cfg->MemorySpdPtr020, &mem_cfg->MemorySpdPtr021, },
		[3] = { &mem_cfg->MemorySpdPtr030, &mem_cfg->MemorySpdPtr031, },
		[4] = { &mem_cfg->MemorySpdPtr100, &mem_cfg->MemorySpdPtr101, },
		[5] = { &mem_cfg->MemorySpdPtr110, &mem_cfg->MemorySpdPtr111, },
		[6] = { &mem_cfg->MemorySpdPtr120, &mem_cfg->MemorySpdPtr121, },
		[7] = { &mem_cfg->MemorySpdPtr130, &mem_cfg->MemorySpdPtr131, },
	};
	uint8_t *disable_dimm_upds[MRC_CHANNELS] = {
		&mem_cfg->DisableDimmMc0Ch0,
		&mem_cfg->DisableDimmMc0Ch1,
		&mem_cfg->DisableDimmMc0Ch2,
		&mem_cfg->DisableDimmMc0Ch3,
		&mem_cfg->DisableDimmMc1Ch0,
		&mem_cfg->DisableDimmMc1Ch1,
		&mem_cfg->DisableDimmMc1Ch2,
		&mem_cfg->DisableDimmMc1Ch3,
	};
	size_t ch, dimm;

	mem_cfg->MemorySpdDataLen = data->spd_len;

	for (ch = 0; ch < MRC_CHANNELS; ch++) {
		uint8_t *disable_dimm_ptr = disable_dimm_upds[ch];
		*disable_dimm_ptr = 0;

		for (dimm = 0; dimm < CONFIG_DIMMS_PER_CHANNEL; dimm++) {
			uint32_t *spd_ptr = spd_upds[ch][dimm];

			*spd_ptr = data->spd[ch][dimm];
			/* Disable any DIMM slot for which no SPD data is available. */
			if (!*spd_ptr)
				*disable_dimm_ptr |= BIT(dimm);
		}
	}
}
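/*
 * Common helper for the DQ/DQS map UPDs: copy the board-provided map into the
 * UPD of every populated MRC channel and zero the UPDs of unpopulated ones.
 */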
static void mem_init_dq_dqs_upds(void *upds[MRC_CHANNELS], const void *map, size_t upd_size,
				 const struct mem_channel_data *data)
{
	size_t i;

	for (i = 0; i < MRC_CHANNELS; i++, map += upd_size) {
		if (channel_is_populated(i, MRC_CHANNELS, data->ch_population_flags))
			memcpy(upds[i], map, upd_size);
		else
			memset(upds[i], 0, upd_size);
	}
}
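/* Program the CPU-to-DRAM DQ (data) bit mapping UPDs for all MRC channels. */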
static void mem_init_dq_upds(FSP_M_CONFIG *mem_cfg, const struct mem_channel_data *data,
			     const struct mb_cfg *mb_cfg)
{
	void *dq_upds[MRC_CHANNELS] = {
		&mem_cfg->DqMapCpu2DramMc0Ch0,
		&mem_cfg->DqMapCpu2DramMc0Ch1,
		&mem_cfg->DqMapCpu2DramMc0Ch2,
		&mem_cfg->DqMapCpu2DramMc0Ch3,
		&mem_cfg->DqMapCpu2DramMc1Ch0,
		&mem_cfg->DqMapCpu2DramMc1Ch1,
		&mem_cfg->DqMapCpu2DramMc1Ch2,
		&mem_cfg->DqMapCpu2DramMc1Ch3,
	};

	const size_t upd_size = sizeof(mem_cfg->DqMapCpu2DramMc0Ch0);

	_Static_assert(sizeof(mem_cfg->DqMapCpu2DramMc0Ch0) == CONFIG_MRC_CHANNEL_WIDTH,
		       "Incorrect DQ UPD size!");

	mem_init_dq_dqs_upds(dq_upds, mb_cfg->dq_map, upd_size, data);
}
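/* Program the CPU-to-DRAM DQS (strobe) mapping UPDs for all MRC channels. */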
static void mem_init_dqs_upds(FSP_M_CONFIG *mem_cfg, const struct mem_channel_data *data,
			      const struct mb_cfg *mb_cfg)
{
	void *dqs_upds[MRC_CHANNELS] = {
		&mem_cfg->DqsMapCpu2DramMc0Ch0,
		&mem_cfg->DqsMapCpu2DramMc0Ch1,
		&mem_cfg->DqsMapCpu2DramMc0Ch2,
		&mem_cfg->DqsMapCpu2DramMc0Ch3,
		&mem_cfg->DqsMapCpu2DramMc1Ch0,
		&mem_cfg->DqsMapCpu2DramMc1Ch1,
		&mem_cfg->DqsMapCpu2DramMc1Ch2,
		&mem_cfg->DqsMapCpu2DramMc1Ch3,
	};

	const size_t upd_size = sizeof(mem_cfg->DqsMapCpu2DramMc0Ch0);

	_Static_assert(sizeof(mem_cfg->DqsMapCpu2DramMc0Ch0) == CONFIG_MRC_CHANNEL_WIDTH / 8,
		       "Incorrect DQS UPD size!");

	mem_init_dq_dqs_upds(dqs_upds, mb_cfg->dqs_map, upd_size, data);
}
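/*
 * Mainboard entry point: translate the board's struct mb_cfg and SPD
 * information into the memory-related FSP-M UPDs.
 */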
void memcfg_init(FSPM_UPD *memupd, const struct mb_cfg *mb_cfg,
		 const struct mem_spd *spd_info, bool half_populated)
{
	struct mem_channel_data data;
	FSP_M_CONFIG *mem_cfg = &memupd->FspmConfig;

	if (mb_cfg->type >= ARRAY_SIZE(soc_mem_cfg))
		die("Invalid memory type(%x)!\n", mb_cfg->type);

	mem_populate_channel_data(memupd, &soc_mem_cfg[mb_cfg->type], spd_info, half_populated,
				  &data);
	mem_init_spd_upds(mem_cfg, &data);
	mem_init_dq_upds(mem_cfg, &data, mb_cfg);
	mem_init_dqs_upds(mem_cfg, &data, mb_cfg);

	mem_cfg->ECT = mb_cfg->ect;

	switch (mb_cfg->type) {
	case MEM_TYPE_DDR4:
		mem_cfg->DqPinsInterleaved = mb_cfg->ddr4_config.dq_pins_interleaved;
		break;
	case MEM_TYPE_LP4X:
		/* LPDDR4x does not allow interleaved memory */
		mem_cfg->DqPinsInterleaved = 0;
		break;
	default:
		die("Unsupported memory type(%d)\n", mb_cfg->type);
	}
}
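/*
 * Minimal usage sketch (hypothetical board values, not taken from this file):
 * a mainboard would typically call memcfg_init() from its
 * mainboard_memory_init_params() hook in romstage. The spd_info fields are
 * board-specific and depend on struct mem_spd, so they are left zeroed here.
 *
 *	static const struct mb_cfg board_mem_cfg = {
 *		.type = MEM_TYPE_DDR4,
 *		.ect = true,
 *		.ddr4_config = {
 *			.dq_pins_interleaved = true,
 *		},
 *	};
 *
 *	void mainboard_memory_init_params(FSPM_UPD *memupd)
 *	{
 *		static const struct mem_spd spd_info = { 0 };
 *
 *		memcfg_init(memupd, &board_mem_cfg, &spd_info, false);
 *	}
 */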