1 /* SPDX-License-Identifier: GPL-2.0-only */
3 #include <console/console.h>
5 #include <device/device.h>
6 #include <device/dram/ddr4.h>
8 #include <memory_info.h>
/* Index into ddr4_speeds[]; order must match the table below */
enum ddr4_speed_grade {
	DDR4_1600,
	DDR4_1866,
	DDR4_2133,
	DDR4_2400,
	DDR4_2666,
	DDR4_2933,
	DDR4_3200
};

struct ddr4_speed_attr {
	uint32_t min_clock_mhz; // inclusive
	uint32_t max_clock_mhz; // inclusive
	uint32_t reported_mts;
};

/**
 * DDR4 speed attributes derived from JEDEC 79-4C tables 169 & 170
 *
 * min_clock_mhz = 1000/max_tCk_avg(ns) + 1
 *                 Adding 1 to make minimum inclusive
 * max_clock_mhz = 1000/min_tCk_avg(ns)
 * reported_mts  = Standard reported DDR4 speed in MT/s
 *                 May be 1 less than the actual max MT/s
 */
static const struct ddr4_speed_attr ddr4_speeds[] = {
	[DDR4_1600] = {.min_clock_mhz = 668,  .max_clock_mhz = 800,  .reported_mts = 1600},
	[DDR4_1866] = {.min_clock_mhz = 801,  .max_clock_mhz = 934,  .reported_mts = 1866},
	[DDR4_2133] = {.min_clock_mhz = 935,  .max_clock_mhz = 1067, .reported_mts = 2133},
	[DDR4_2400] = {.min_clock_mhz = 1068, .max_clock_mhz = 1200, .reported_mts = 2400},
	[DDR4_2666] = {.min_clock_mhz = 1201, .max_clock_mhz = 1333, .reported_mts = 2666},
	[DDR4_2933] = {.min_clock_mhz = 1334, .max_clock_mhz = 1466, .reported_mts = 2933},
	[DDR4_3200] = {.min_clock_mhz = 1467, .max_clock_mhz = 1600, .reported_mts = 3200}
};
/* Logical blocks of the 512-byte DDR4 SPD image */
typedef enum {
	BLOCK_0,   /* Base Configuration and DRAM Parameters */
	BLOCK_1,
	BLOCK_1_L, /* Standard Module Parameters */
	BLOCK_1_H, /* Hybrid Module Parameters */
	BLOCK_2,
	BLOCK_2_L, /* Hybrid Module Extended Function Parameters */
	BLOCK_2_H, /* Manufacturing Information */
	BLOCK_3    /* End user programmable */
} spd_block_type;

typedef struct {
	spd_block_type type;
	uint16_t start;     /* starting offset from beginning of the spd */
	uint16_t len;       /* size of the block */
	uint16_t crc_start; /* offset from start of crc bytes, 0 if none */
} spd_block;

/* 'SPD contents architecture' as per datasheet */
const spd_block spd_blocks[] = {
	{.type = BLOCK_0, 0, 128, 126},   {.type = BLOCK_1, 128, 128, 126},
	{.type = BLOCK_1_L, 128, 64, 0},  {.type = BLOCK_1_H, 192, 64, 0},
	{.type = BLOCK_2_L, 256, 64, 62}, {.type = BLOCK_2_H, 320, 64, 0},
	{.type = BLOCK_3, 384, 128, 0}
};
73 static bool verify_block(const spd_block
*block
, spd_ddr4_raw_data spd
)
75 uint16_t crc
, spd_crc
;
77 spd_crc
= (spd
[block
->start
+ block
->crc_start
+ 1] << 8) |
78 spd
[block
->start
+ block
->crc_start
];
79 crc
= ddr_crc16(&spd
[block
->start
], block
->len
- 2);
81 return spd_crc
== crc
;
84 /* Check if given block is 'reserved' for a given module type */
85 static bool block_exists(spd_block_type type
, u8 dimm_type
)
90 case BLOCK_0
: /* fall-through */
91 case BLOCK_1
: /* fall-through */
92 case BLOCK_1_L
: /* fall-through */
93 case BLOCK_1_H
: /* fall-through */
94 case BLOCK_2_H
: /* fall-through */
95 case BLOCK_3
: /* fall-through */
98 is_hybrid
= (dimm_type
>> 4) & ((1 << 3) - 1);
102 default: /* fall-through */
108 * Converts DDR4 clock speed in MHz to the standard reported speed in MT/s
110 uint16_t ddr4_speed_mhz_to_reported_mts(uint16_t speed_mhz
)
112 for (enum ddr4_speed_grade speed
= 0; speed
< ARRAY_SIZE(ddr4_speeds
); speed
++) {
113 const struct ddr4_speed_attr
*speed_attr
= &ddr4_speeds
[speed
];
114 if (speed_mhz
>= speed_attr
->min_clock_mhz
&&
115 speed_mhz
<= speed_attr
->max_clock_mhz
) {
116 return speed_attr
->reported_mts
;
119 printk(BIOS_ERR
, "DDR4 speed of %d MHz is out of range\n", speed_mhz
);
124 * \brief Decode the raw SPD data
126 * Decodes a raw SPD data from a DDR4 DIMM, and organizes it into a
127 * @ref dimm_attr structure. The SPD data must first be read in a contiguous
128 * array, and passed to this function.
130 * @param dimm pointer to @ref dimm_attr structure where the decoded data is to
132 * @param spd array of raw data previously read from the SPD.
134 * @return @ref spd_status enumerator
135 * SPD_STATUS_OK -- decoding was successful
136 * SPD_STATUS_INVALID -- invalid SPD or not a DDR4 SPD
137 * SPD_STATUS_CRC_ERROR -- checksum mismatch
139 int spd_decode_ddr4(struct dimm_attr_ddr4_st
*dimm
, spd_ddr4_raw_data spd
)
142 u8 bus_width
, sdram_width
;
143 u16 cap_per_die_mbit
;
144 u16 spd_bytes_total
, spd_bytes_used
;
145 const uint16_t spd_bytes_used_table
[] = {0, 128, 256, 384, 512};
147 /* Make sure that the SPD dump is indeed from a DDR4 module */
148 if (spd
[2] != SPD_MEMORY_TYPE_DDR4_SDRAM
) {
149 printk(BIOS_ERR
, "Not a DDR4 SPD!\n");
150 dimm
->dram_type
= SPD_MEMORY_TYPE_UNDEFINED
;
151 return SPD_STATUS_INVALID
;
154 spd_bytes_total
= (spd
[0] >> 4) & 0x7;
155 spd_bytes_used
= spd
[0] & 0xf;
157 if (!spd_bytes_total
|| !spd_bytes_used
) {
158 printk(BIOS_ERR
, "SPD failed basic sanity checks\n");
159 return SPD_STATUS_INVALID
;
162 if (spd_bytes_total
>= 3)
163 printk(BIOS_WARNING
, "SPD Bytes Total value is reserved\n");
165 spd_bytes_total
= 256 << (spd_bytes_total
- 1);
167 if (spd_bytes_used
> 4) {
168 printk(BIOS_ERR
, "SPD Bytes Used value is reserved\n");
169 return SPD_STATUS_INVALID
;
172 spd_bytes_used
= spd_bytes_used_table
[spd_bytes_used
];
174 if (spd_bytes_used
> spd_bytes_total
) {
175 printk(BIOS_ERR
, "SPD Bytes Used is greater than SPD Bytes Total\n");
176 return SPD_STATUS_INVALID
;
179 /* Verify CRC of blocks that have them, do not step over 'used' length */
180 for (int i
= 0; i
< ARRAY_SIZE(spd_blocks
); i
++) {
181 /* this block is not checksummed */
182 if (spd_blocks
[i
].crc_start
== 0)
184 /* we shouldn't have this block */
185 if (spd_blocks
[i
].start
+ spd_blocks
[i
].len
> spd_bytes_used
)
187 /* check if block exists in the current schema */
188 if (!block_exists(spd_blocks
[i
].type
, spd
[3]))
190 if (!verify_block(&spd_blocks
[i
], spd
)) {
191 printk(BIOS_ERR
, "CRC failed for block %d\n", i
);
192 return SPD_STATUS_CRC_ERROR
;
196 dimm
->dram_type
= SPD_MEMORY_TYPE_DDR4_SDRAM
;
197 dimm
->dimm_type
= spd
[3] & ((1 << 4) - 1);
199 reg8
= spd
[13] & ((1 << 4) - 1);
200 dimm
->bus_width
= reg8
;
201 bus_width
= 8 << (reg8
& ((1 << 3) - 1));
203 reg8
= spd
[12] & ((1 << 3) - 1);
204 dimm
->sdram_width
= reg8
;
205 sdram_width
= 4 << reg8
;
207 reg8
= spd
[4] & ((1 << 4) - 1);
208 dimm
->cap_per_die_mbit
= reg8
;
209 cap_per_die_mbit
= (1 << reg8
) * 256;
211 reg8
= (spd
[12] >> 3) & ((1 << 3) - 1);
212 dimm
->ranks
= reg8
+ 1;
214 if (!bus_width
|| !sdram_width
) {
215 printk(BIOS_ERR
, "SPD information is invalid");
217 return SPD_STATUS_INVALID
;
220 /* seems to be only one, in mV */
221 dimm
->vdd_voltage
= 1200;
224 /* FIXME: this is wrong for 3DS devices */
225 dimm
->size_mb
= cap_per_die_mbit
/ 8 * bus_width
/ sdram_width
* dimm
->ranks
;
227 dimm
->ecc_extension
= spd
[SPD_PRIMARY_SDRAM_WIDTH
] & SPD_ECC_8BIT
;
229 /* make sure we have the manufacturing information block */
230 if (spd_bytes_used
> 320) {
231 dimm
->manufacturer_id
= (spd
[351] << 8) | spd
[350];
232 memcpy(dimm
->part_number
, &spd
[329], SPD_DDR4_PART_LEN
);
233 dimm
->part_number
[SPD_DDR4_PART_LEN
] = 0;
234 memcpy(dimm
->serial_number
, &spd
[325], sizeof(dimm
->serial_number
));
236 return SPD_STATUS_OK
;
239 enum cb_err
spd_add_smbios17_ddr4(const u8 channel
, const u8 slot
, const u16 selected_freq
,
240 const struct dimm_attr_ddr4_st
*info
)
242 struct memory_info
*mem_info
;
243 struct dimm_info
*dimm
;
246 * Allocate CBMEM area for DIMM information used to populate SMBIOS
249 mem_info
= cbmem_find(CBMEM_ID_MEMINFO
);
251 mem_info
= cbmem_add(CBMEM_ID_MEMINFO
, sizeof(*mem_info
));
253 printk(BIOS_DEBUG
, "CBMEM entry for DIMM info: %p\n", mem_info
);
257 memset(mem_info
, 0, sizeof(*mem_info
));
260 if (mem_info
->dimm_cnt
>= ARRAY_SIZE(mem_info
->dimm
)) {
261 printk(BIOS_WARNING
, "BUG: Too many DIMM infos for %s.\n", __func__
);
265 dimm
= &mem_info
->dimm
[mem_info
->dimm_cnt
];
267 dimm
->ddr_type
= MEMORY_TYPE_DDR4
;
268 dimm
->ddr_frequency
= selected_freq
;
269 dimm
->dimm_size
= info
->size_mb
;
270 dimm
->channel_num
= channel
;
271 dimm
->rank_per_dimm
= info
->ranks
;
272 dimm
->dimm_num
= slot
;
273 memcpy(dimm
->module_part_number
, info
->part_number
, SPD_DDR4_PART_LEN
);
274 dimm
->mod_id
= info
->manufacturer_id
;
275 dimm
->mod_type
= info
->dimm_type
;
276 dimm
->bus_width
= info
->bus_width
;
277 memcpy(dimm
->serial
, info
->serial_number
,
278 MIN(sizeof(dimm
->serial
), sizeof(info
->serial_number
)));
280 dimm
->vdd_voltage
= info
->vdd_voltage
;
281 mem_info
->dimm_cnt
++;
287 /* Returns MRS command */
288 static uint32_t ddr4_wr_to_mr0_map(u8 wr
)
290 static const unsigned int enc
[] = {0, 1, 2, 3, 4, 5, 7, 6, 8};
291 int wr_idx
= wr
/2 - 5;
292 if (wr_idx
< 0 || wr_idx
>= ARRAY_SIZE(enc
))
293 die("WR index out of bounds: %d (derived from %d)\n", wr_idx
, wr
);
295 return enc
[wr_idx
] << 9;
298 /* Returns MRS command */
299 static uint32_t ddr4_cas_to_mr0_map(u8 cas
)
301 static const unsigned int enc
[] = {
303 * The only non-zero bits are at positions (LSB0): 12, 6, 5, 4, 2.
306 0x0004, /* CL = 10 */
307 0x0010, /* CL = 11 */
308 0x0014, /* CL = 12 */
309 0x0020, /* CL = 13 */
310 0x0024, /* CL = 14 */
311 0x0030, /* CL = 15 */
312 0x0034, /* CL = 16 */
313 0x0064, /* CL = 17 */
314 0x0040, /* CL = 18 */
315 0x0070, /* CL = 19 */
316 0x0044, /* CL = 20 */
317 0x0074, /* CL = 21 */
318 0x0050, /* CL = 22 */
319 0x0060, /* CL = 23 */
320 0x0054, /* CL = 24 */
321 0x1000, /* CL = 25 */
322 0x1004, /* CL = 26 */
323 0x1010, /* CL = 27 (only 3DS) */
324 0x1014, /* CL = 28 */
325 0x1020, /* reserved for CL = 29 */
326 0x1024, /* CL = 30 */
327 0x1030, /* reserved for CL = 31 */
328 0x1034, /* CL = 32 */
331 int cas_idx
= cas
- 9;
332 if (cas_idx
< 0 || cas_idx
>= ARRAY_SIZE(enc
))
333 die("CAS index out of bounds: %d (derived from %d)\n", cas_idx
, cas
);
338 uint32_t ddr4_get_mr0(u8 write_recovery
,
339 enum ddr4_mr0_dll_reset dll_reset
,
341 enum ddr4_mr0_burst_type burst_type
,
342 enum ddr4_mr0_burst_length burst_length
)
344 uint32_t cmd
= 0 << 20;
346 cmd
|= ddr4_wr_to_mr0_map(write_recovery
);
347 cmd
|= dll_reset
<< 8;
348 cmd
|= DDR4_MR0_MODE_NORMAL
<< 7;
349 cmd
|= ddr4_cas_to_mr0_map(cas
);
350 cmd
|= burst_type
<< 3;
351 cmd
|= burst_length
<< 0;
356 uint32_t ddr4_get_mr1(enum ddr4_mr1_qoff qoff
,
357 enum ddr4_mr1_tdqs tdqs
,
358 enum ddr4_mr1_rtt_nom rtt_nom
,
359 enum ddr4_mr1_write_leveling write_leveling
,
360 enum ddr4_mr1_odimp output_drive_impedance
,
361 enum ddr4_mr1_additive_latency additive_latency
,
362 enum ddr4_mr1_dll dll_enable
)
364 uint32_t cmd
= 1 << 20;
369 cmd
|= write_leveling
<< 7;
370 cmd
|= output_drive_impedance
<< 1;
371 cmd
|= additive_latency
<< 3;
372 cmd
|= dll_enable
<< 0;
377 /* Returns MRS command */
378 static uint32_t ddr4_cwl_to_mr2_map(u8 cwl
)
380 /* Encoding is (starting with 0): 9, 10, 11, 12, 14, 16, 18, 20 */
384 cwl
= (cwl
- 14) / 2 + 4;
389 uint32_t ddr4_get_mr2(enum ddr4_mr2_wr_crc wr_crc
,
390 enum ddr4_mr2_rtt_wr rtt_wr
,
391 enum ddr4_mr2_lp_asr self_refresh
, u8 cwl
)
393 uint32_t cmd
= 2 << 20;
397 cmd
|= self_refresh
<< 6;
398 cmd
|= ddr4_cwl_to_mr2_map(cwl
);
403 uint32_t ddr4_get_mr3(enum ddr4_mr3_mpr_read_format mpr_read_format
,
404 enum ddr4_mr3_wr_cmd_lat_crc_dm command_latency_crc_dm
,
405 enum ddr4_mr3_fine_gran_ref fine_refresh
,
406 enum ddr4_mr3_temp_sensor_readout temp_sensor
,
407 enum ddr4_mr3_pda pda
,
408 enum ddr4_mr3_geardown_mode geardown
,
409 enum ddr4_mr3_mpr_operation mpr_operation
,
412 uint32_t cmd
= 3 << 20;
414 cmd
|= mpr_read_format
<< 11;
415 cmd
|= command_latency_crc_dm
<< 9;
416 cmd
|= fine_refresh
<< 6;
417 cmd
|= temp_sensor
<< 5;
419 cmd
|= geardown
<< 3;
420 cmd
|= mpr_operation
<< 2;
421 cmd
|= (mpr_page
& 3) << 0;
426 uint32_t ddr4_get_mr4(enum ddr4_mr4_hppr hppr
,
427 enum ddr4_mr4_wr_preamble wr_preamble
,
428 enum ddr4_mr4_rd_preamble rd_preamble
,
429 enum ddr4_mr4_rd_preamble_training rd_preamble_train
,
430 enum ddr4_mr4_self_refr_abort self_ref_abrt
,
431 enum ddr4_mr4_cs_to_cmd_latency cs2cmd_lat
,
432 enum ddr4_mr4_sppr sppr
,
433 enum ddr4_mr4_internal_vref_mon int_vref_mon
,
434 enum ddr4_mr4_temp_controlled_refr temp_ctrl_ref
,
435 enum ddr4_mr4_max_pd_mode max_pd
)
437 uint32_t cmd
= 4 << 20;
440 cmd
|= wr_preamble
<< 12;
441 cmd
|= rd_preamble
<< 11;
442 cmd
|= rd_preamble_train
<< 10;
443 cmd
|= self_ref_abrt
<< 9;
444 cmd
|= cs2cmd_lat
<< 6;
446 cmd
|= int_vref_mon
<< 4;
447 cmd
|= temp_ctrl_ref
<< 2;
453 uint32_t ddr4_get_mr5(enum ddr4_mr5_rd_dbi rd_dbi
,
454 enum ddr4_mr5_wr_dbi wr_dbi
,
455 enum ddr4_mr5_data_mask dm
,
456 enum ddr4_mr5_rtt_park rtt_park
,
457 enum ddr4_mr5_odt_pd odt_pd
,
458 enum ddr4_mr5_ca_parity_lat pl
)
460 uint32_t cmd
= 5 << 20;
465 cmd
|= rtt_park
<< 6;
472 /* Returns MRS command */
473 static uint32_t ddr4_tccd_l_to_mr6_map(u8 tccd_l
)
475 if (tccd_l
< 4 || tccd_l
> 8)
476 die("tCCD_l out of range: %d\n", tccd_l
);
478 return (tccd_l
- 4) << 10;
481 uint32_t ddr4_get_mr6(u8 tccd_l
,
482 enum ddr4_mr6_vrefdq_training vrefdq_training
,
483 enum ddr4_mr6_vrefdq_training_range range
,
486 uint32_t cmd
= 6 << 20;
488 cmd
|= ddr4_tccd_l_to_mr6_map(tccd_l
);
489 cmd
|= vrefdq_training
<< 7;
491 cmd
|= vrefdq_value
& 0x3F;
497 * ZQCL: A16 = H, A15 = H, A14 = L, A10 = H, rest either L or H
498 * ZQCS: A16 = H, A15 = H, A14 = L, A10 = L, rest either L or H
500 uint32_t ddr4_get_zqcal_cmd(enum ddr4_zqcal_ls long_short
)
502 uint32_t cmd
= 1 << 16 | 1 << 15;
504 cmd
|= long_short
<< 10;