/* SPDX-License-Identifier: GPL-2.0-only */

#include <console/console.h>
#include <cbfs.h>
#include <cbmem.h>
#include <cf9_reset.h>
#include <string.h>
#include <device/device.h>
#include <device/dram/ddr3.h>
#include <device/mmio.h>
#include <device/pci_ops.h>
#include <device/smbus_host.h>
#include <timestamp.h>
#include <mrc_cache.h>
#include <southbridge/intel/bd82x6x/me.h>
#include <southbridge/intel/bd82x6x/pch.h>
#include <cpu/x86/msr.h>
#include <types.h>

#include "raminit.h"
#include "raminit_common.h"
#include "sandybridge.h"

/* FIXME: no support for 3-channel chipsets */

static void wait_txt_clear(void)
{
	struct cpuid_result cp = cpuid_ext(1, 0);

	/* Check if TXT is supported */
	if (!(cp.ecx & (1 << 6)))
		return;

	/* Some TXT public bit */
	if (!(read32p(0xfed30010) & 1))
		return;

	/* Wait for TXT clear */
	while (!(read8p(0xfed40000) & (1 << 7)))
		;
}

/* Disable a channel in ramctr_timing */
static void disable_channel(ramctr_timing *ctrl, int channel)
{
	ctrl->rankmap[channel] = 0;

	memset(&ctrl->rank_mirror[channel][0], 0, sizeof(ctrl->rank_mirror[0]));

	ctrl->channel_size_mb[channel] = 0;
	ctrl->cmd_stretch[channel]     = 0;
	ctrl->mad_dimm[channel]        = 0;
	memset(&ctrl->timings[channel][0], 0, sizeof(ctrl->timings[0]));
	memset(&ctrl->info.dimm[channel][0], 0, sizeof(ctrl->info.dimm[0]));
}

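/*
 * The following helpers derive SMBIOS memory-array data from the host
 * bridge's CAPID0_A capability/fuse register: whether ECC is fused off,
 * how many DIMM slots each channel may have, how many channels exist, and
 * the documented maximum capacity per channel.
 */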
static uint8_t nb_get_ecc_type(const uint32_t capid0_a)
{
	return capid0_a & CAPID_ECCDIS ? MEMORY_ARRAY_ECC_NONE : MEMORY_ARRAY_ECC_SINGLE_BIT;
}

static uint16_t nb_slots_per_channel(const uint32_t capid0_a)
{
	return !(capid0_a & CAPID_DDPCD) + 1;
}

static uint16_t nb_number_of_channels(const uint32_t capid0_a)
{
	return !(capid0_a & CAPID_PDCD) + 1;
}

static uint32_t nb_max_chan_capacity_mib(const uint32_t capid0_a)
{
	uint32_t ddrsz;

	/* Values from documentation, which assume two DIMMs per channel */
	switch (CAPID_DDRSZ(capid0_a)) {
	case 1:  ddrsz = 8192;  break;
	case 2:  ddrsz = 2048;  break;
	case 3:  ddrsz = 512;   break;
	default: ddrsz = 16384; break;
	}

	/* Account for the maximum number of DIMMs per channel */
	return (ddrsz / 2) * nb_slots_per_channel(capid0_a);
}

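/*
 * Note: ctrl->tCK is kept in 1/256 ns units (matching the TCK_* constants
 * in the generic DDR3 code), so (1000 << 8) / tCK below works out to the
 * memory clock in MHz.
 */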
/* Fill cbmem with information for SMBIOS type 16 and type 17 */
static void setup_sdram_meminfo(ramctr_timing *ctrl)
{
	int channel, slot;
	const u16 ddr_freq = (1000 << 8) / ctrl->tCK;

	FOR_ALL_CHANNELS for (slot = 0; slot < NUM_SLOTS; slot++) {
		enum cb_err ret = spd_add_smbios17(channel, slot, ddr_freq,
						   &ctrl->info.dimm[channel][slot]);
		if (ret != CB_SUCCESS)
			printk(BIOS_ERR, "RAMINIT: Failed to add SMBIOS17\n");
	}

	/* The 'spd_add_smbios17' function allocates this CBMEM area */
	struct memory_info *m = cbmem_find(CBMEM_ID_MEMINFO);
	if (!m)
		die("Failed to find the memory_info CBMEM entry\n");

	const uint32_t capid0_a = pci_read_config32(HOST_BRIDGE, CAPID0_A);

	const uint16_t channels = nb_number_of_channels(capid0_a);

	m->ecc_type = nb_get_ecc_type(capid0_a);
	m->max_capacity_mib = channels * nb_max_chan_capacity_mib(capid0_a);
	m->number_of_devices = channels * nb_slots_per_channel(capid0_a);
}

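/*
 * The CRC16 over each DIMM's unique SPD bytes is stored in the MRC cache
 * together with the training results. Comparing those values against the
 * currently installed DIMMs tells us whether the cached timings can be
 * reused for a fast boot.
 */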
/* Return CRC16 match for all SPDs */
static int verify_crc16_spds_ddr3(spd_ddr3_raw_data *spd, ramctr_timing *ctrl)
{
	int channel, slot, spd_slot;
	int match = 1;

	FOR_ALL_CHANNELS {
		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;
			match &= ctrl->spd_crc[channel][slot] ==
				spd_ddr3_calc_unique_crc(spd[spd_slot], sizeof(spd_ddr3_raw_data));
		}
	}

	return match;
}

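/*
 * Read an SPD over SMBus. With id_only set, only the module ID/serial bytes
 * (from SPD_DDR3_MOD_ID1 onwards) are fetched, which is enough for the
 * unique-CRC check above and keeps SMBus traffic down.
 */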
static void read_spd(spd_ddr3_raw_data *spd, u8 addr, bool id_only)
{
	int j;

	if (id_only) {
		for (j = SPD_DDR3_MOD_ID1; j < 128; j++)
			(*spd)[j] = smbus_read_byte(addr, j);
	} else {
		for (j = 0; j < SPD_SIZE_MAX_DDR3; j++)
			(*spd)[j] = smbus_read_byte(addr, j);
	}
}

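/*
 * Collect raw SPD data for every slot. Boards with SPD data in CBFS
 * (memory-down) copy the entry selected by spd_info.spd_index from spd.bin,
 * while slots with a real SMBus address are read from the DIMM; otherwise
 * the SMBus addresses come from the devicetree configuration.
 */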
static void mainboard_get_spd(spd_ddr3_raw_data *spd, bool id_only)
{
	const struct northbridge_intel_sandybridge_config *cfg = config_of_soc();
	unsigned int i;

	if (CONFIG(HAVE_SPD_IN_CBFS)) {
		struct spd_info spdi = {0};

		mb_get_spd_map(&spdi);

		size_t spd_file_len;
		uint8_t *spd_file = cbfs_map("spd.bin", &spd_file_len);

		printk(BIOS_DEBUG, "SPD index %d\n", spdi.spd_index);

		/* SPD file sanity check */
		if (!spd_file)
			die("SPD data %s!", "not found");

		if (spd_file_len < ((spdi.spd_index + 1) * SPD_SIZE_MAX_DDR3))
			die("SPD data %s!", "incomplete");

		/*
		 * Copy SPD data specified by spd_info.spd_index to all slots marked as
		 * SPD_MEMORY_DOWN.
		 *
		 * Read SPD data from slots with a real SMBus address.
		 */
		for (i = 0; i < ARRAY_SIZE(spdi.addresses); i++) {
			if (spdi.addresses[i] == SPD_MEMORY_DOWN)
				memcpy(&spd[i], spd_file + (spdi.spd_index * SPD_SIZE_MAX_DDR3),
				       SPD_SIZE_MAX_DDR3);
			else if (spdi.addresses[i] != 0)
				read_spd(&spd[i], spdi.addresses[i], id_only);
		}
	} else {
		for (i = 0; i < ARRAY_SIZE(cfg->spd_addresses); i++) {
			if (cfg->spd_addresses[i] != 0)
				read_spd(&spd[i], cfg->spd_addresses[i], id_only);
		}
	} /* CONFIG(HAVE_SPD_IN_CBFS) */
}

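/*
 * Decode all SPDs and fill in the controller state: a DIMM's XMP profile 1
 * is preferred when it is usable (matching DIMM count and 1.5 V), otherwise
 * the JEDEC timings are used. This also builds the rank map, per-channel
 * sizes and the resulting ECC capability.
 */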
static void dram_find_spds_ddr3(spd_ddr3_raw_data *spd, ramctr_timing *ctrl)
{
	int dimms = 0, ch_dimms;
	int channel, slot, spd_slot;
	bool can_use_ecc = ctrl->ecc_supported;

	memset(ctrl->rankmap, 0, sizeof(ctrl->rankmap));

	ctrl->extended_temperature_range = 1;
	ctrl->auto_self_refresh = 1;

	FOR_ALL_CHANNELS {
		ctrl->channel_size_mb[channel] = 0;

		ch_dimms = 0;
		/* Count dimms on channel */
		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;

			if (spd[spd_slot][SPD_MEMORY_TYPE] == SPD_MEMORY_TYPE_SDRAM_DDR3)
				ch_dimms++;
		}

		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;
			printk(BIOS_DEBUG, "SPD probe channel%d, slot%d\n", channel, slot);

			struct dimm_attr_ddr3_st *const dimm = &ctrl->info.dimm[channel][slot];

			/* Search for XMP profile */
			spd_xmp_decode_ddr3(dimm, spd[spd_slot], DDR3_XMP_PROFILE_1);

			if (dimm->dram_type != SPD_MEMORY_TYPE_SDRAM_DDR3) {
				printram("No valid XMP profile found.\n");
				spd_decode_ddr3(dimm, spd[spd_slot]);

			} else if (ch_dimms > dimm->dimms_per_channel) {
				printram(
				"XMP profile supports %u DIMMs, but %u DIMMs are installed.\n",
					dimm->dimms_per_channel, ch_dimms);

				if (CONFIG(NATIVE_RAMINIT_IGNORE_XMP_MAX_DIMMS))
					printk(BIOS_WARNING,
						"XMP maximum DIMMs will be ignored.\n");
				else
					spd_decode_ddr3(dimm, spd[spd_slot]);

			} else if (dimm->voltage != 1500) {
				/* TODO: Support DDR3 voltages other than 1500mV */
				printram("XMP profile's requested %u mV is unsupported.\n",
					 dimm->voltage);

				if (CONFIG(NATIVE_RAMINIT_IGNORE_XMP_REQUESTED_VOLTAGE))
					printk(BIOS_WARNING,
						"XMP requested voltage will be ignored.\n");
				else
					spd_decode_ddr3(dimm, spd[spd_slot]);
			}

			/* Fill in CRC16 for MRC cache */
			ctrl->spd_crc[channel][slot] =
				spd_ddr3_calc_unique_crc(spd[spd_slot], sizeof(spd_ddr3_raw_data));

			if (dimm->dram_type != SPD_MEMORY_TYPE_SDRAM_DDR3) {
				/* Mark DIMM as invalid */
				dimm->ranks   = 0;
				dimm->size_mb = 0;
				continue;
			}

			dram_print_spd_ddr3(dimm);
			dimms++;
			ctrl->rank_mirror[channel][slot * 2] = 0;
			ctrl->rank_mirror[channel][slot * 2 + 1] = dimm->flags.pins_mirrored;

			ctrl->channel_size_mb[channel] += dimm->size_mb;

			if (!dimm->flags.is_ecc)
				can_use_ecc = false;

			ctrl->auto_self_refresh &= dimm->flags.asr;

			ctrl->extended_temperature_range &= dimm->flags.ext_temp_refresh;

			ctrl->rankmap[channel] |= ((1 << dimm->ranks) - 1) << (2 * slot);

			printk(BIOS_DEBUG, "channel[%d] rankmap = 0x%x\n", channel,
			       ctrl->rankmap[channel]);
		}

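		/*
		 * DIMMs are built to JEDEC reference raw cards (A, B, ...). When both
		 * slots of a channel are populated, the raw-card pairing below selects
		 * an extra offset that the timing code applies later; a single DIMM or
		 * an unknown raw card gets no offset.
		 */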
		const u8 rc_0 = ctrl->info.dimm[channel][0].reference_card;
		const u8 rc_1 = ctrl->info.dimm[channel][1].reference_card;

		if (ch_dimms == NUM_SLOTS && rc_0 < 6 && rc_1 < 6) {
			const int ref_card_offset_table[6][6] = {
				{ 0, 0, 0, 0, 2, 2 },
				{ 0, 0, 0, 0, 2, 2 },
				{ 0, 0, 0, 0, 2, 2 },
				{ 0, 0, 0, 0, 1, 1 },
				{ 2, 2, 2, 1, 0, 0 },
				{ 2, 2, 2, 1, 0, 0 },
			};
			ctrl->ref_card_offset[channel] = ref_card_offset_table[rc_0][rc_1];
		} else {
			ctrl->ref_card_offset[channel] = 0;
		}
	}

	if (ctrl->ecc_forced || CONFIG(RAMINIT_ENABLE_ECC))
		ctrl->ecc_enabled = can_use_ecc;
	if (ctrl->ecc_forced && !ctrl->ecc_enabled)
		die("ECC mode forced but non-ECC DIMM installed!");
	printk(BIOS_DEBUG, "ECC is %s\n", ctrl->ecc_enabled ? "enabled" : "disabled");

	ctrl->lanes = ctrl->ecc_enabled ? 9 : 8;

	if (!dimms)
		die("No DIMMs were found");
}

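/*
 * The entire ramctr_timing structure is stashed in the MRC cache. On the
 * next boot it is mapped back by mrc_cache_current_mmap_leak() and reused
 * if the CPU and the DIMMs are unchanged, and it is required for S3 resume.
 */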
static void save_timings(ramctr_timing *ctrl)
{
	/* Save the MRC S3 restore data to cbmem */
	mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION, ctrl, sizeof(*ctrl));
}

static void reinit_ctrl(ramctr_timing *ctrl, const u32 cpuid)
{
	/* Reset internal state */
	memset(ctrl, 0, sizeof(*ctrl));

	/* Get architecture */
	ctrl->cpu = cpuid;

	/* Get ECC support and mode */
	ctrl->ecc_forced = get_host_ecc_forced();
	ctrl->ecc_supported = ctrl->ecc_forced || get_host_ecc_cap();
	printk(BIOS_DEBUG, "ECC supported: %s ECC forced: %s\n",
	       ctrl->ecc_supported ? "yes" : "no",
	       ctrl->ecc_forced ? "yes" : "no");
}

static void init_dram_ddr3(int s3resume, const u32 cpuid)
{
	int me_uma_size, cbmem_was_inited, fast_boot, err;
	ramctr_timing ctrl;
	spd_ddr3_raw_data spds[4];
	size_t mrc_size;
	ramctr_timing *ctrl_cached = NULL;

	timestamp_add_now(TS_INITRAM_START);

	mchbar_setbits32(SAPMCTL, 1 << 0);

	/* Wait for ME to be ready */
	intel_early_me_init();
	me_uma_size = intel_early_me_uma_size();

	printk(BIOS_DEBUG, "Starting native Platform init\n");

	wait_txt_clear();

	wrmsr(0x2e6, (msr_t) { .lo = 0, .hi = 0 });

	const u32 sskpd = mchbar_read32(SSKPD);	// !!! = 0x00000000
	if ((pci_read_config16(SOUTHBRIDGE, 0xa2) & 0xa0) == 0x20 && sskpd && !s3resume) {
		mchbar_write32(SSKPD, 0);
		/* Need reset */
		system_reset();
	}

	early_pch_init_native();

	early_thermal_init();

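	/*
	 * Cached training data is only reused if the stored blob is large enough,
	 * it was produced by the same CPU, and (on a normal boot) the DIMMs have
	 * not been swapped; an S3 resume cannot proceed without it.
	 */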
	/* Try to find timings in MRC cache */
	ctrl_cached = mrc_cache_current_mmap_leak(MRC_TRAINING_DATA,
						  MRC_CACHE_VERSION,
						  &mrc_size);
	if (mrc_size < sizeof(ctrl))
		ctrl_cached = NULL;

	/* Before reusing training data, assert that the CPU has not been replaced */
	if (ctrl_cached && cpuid != ctrl_cached->cpu) {
		/* It is not really worrying on a cold boot, but fatal when resuming from S3 */
		printk(s3resume ? BIOS_ALERT : BIOS_NOTICE,
		       "CPUID %x differs from stored CPUID %x, CPU was replaced!\n",
		       cpuid, ctrl_cached->cpu);

		/* Invalidate the stored data, it likely does not apply to the current CPU */
		ctrl_cached = NULL;
	}

	if (s3resume && !ctrl_cached) {
		/* S3 resume is impossible, reset to come up cleanly */
		system_reset();
	}

	/* Verify MRC cache for fast boot */
	if (!s3resume && ctrl_cached) {
		/* Load SPD unique information data. */
		memset(spds, 0, sizeof(spds));
		mainboard_get_spd(spds, 1);

		/* check SPD CRC16 to make sure the DIMMs haven't been replaced */
		fast_boot = verify_crc16_spds_ddr3(spds, ctrl_cached);
		if (!fast_boot)
			printk(BIOS_DEBUG, "Stored timings CRC16 mismatch.\n");
	} else {
		fast_boot = s3resume;
	}

	if (fast_boot) {
		printk(BIOS_DEBUG, "Trying stored timings.\n");
		memcpy(&ctrl, ctrl_cached, sizeof(ctrl));

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
		if (err) {
			if (s3resume) {
				/* Failed S3 resume, reset to come up cleanly */
				system_reset();
			}
			/* No need to erase bad MRC cache here, it gets overwritten on a
			   successful boot */
			printk(BIOS_ERR, "Stored timings are invalid !\n");
			fast_boot = 0;
		}
	}
	if (!fast_boot) {
		/* Reset internal state */
		reinit_ctrl(&ctrl, cpuid);

		printk(BIOS_INFO, "ECC RAM %s.\n", ctrl.ecc_forced ? "required" :
		       ctrl.ecc_supported ? "supported" : "unsupported");

		/* Get DDR3 SPD data */
		memset(spds, 0, sizeof(spds));
		mainboard_get_spd(spds, 0);
		dram_find_spds_ddr3(spds, &ctrl);

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
	}

	if (err) {
		/* Fallback: disable failing channel */
		printk(BIOS_ERR, "RAM training failed, trying fallback.\n");
		printram("Disable failing channel.\n");

		/* Reset internal state */
		reinit_ctrl(&ctrl, cpuid);

		/* Reset DDR3 frequency */
		dram_find_spds_ddr3(spds, &ctrl);

		/* Disable failing channel */
		disable_channel(&ctrl, GET_ERR_CHANNEL(err));

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
	}

	if (err)
		die("raminit failed");

	/* FIXME: should be hardware revision-dependent. The register only exists on IVB. */
	mchbar_write32(CHANNEL_HASH, 0x00a030ce);

	set_scrambling_seed(&ctrl);

	if (!s3resume && ctrl.ecc_enabled)
		channel_scrub(&ctrl);

	set_normal_operation(&ctrl);

	final_registers(&ctrl);

	/* can't do this earlier because it needs to be done in normal operation */
	if (CONFIG(DEBUG_RAM_SETUP) && !s3resume && ctrl.ecc_enabled) {
		uint32_t i, tseg = pci_read_config32(HOST_BRIDGE, TSEGMB);

		printk(BIOS_INFO, "RAMINIT: ECC scrub test on first channel up to 0x%x\n",
		       tseg);

		/*
		 * This test helps to debug the ECC scrubbing.
		 * It likely tests every channel/rank, as rank interleave and enhanced
		 * interleave are enabled, but there's no guarantee for it.
		 */

		/* Skip first MB to avoid special case for A-seg and test up to TSEG */
		for (i = 1; i < tseg >> 20; i++) {
			for (int j = 0; j < 1 * MiB; j += 4096) {
				uintptr_t addr = i * MiB + j;
				if (read32((u32 *)addr) == 0)
					continue;

				printk(BIOS_ERR, "RAMINIT: ECC scrub: DRAM not cleared at"
				       " addr 0x%lx\n", addr);
				break;
			}
		}
		printk(BIOS_INFO, "RAMINIT: ECC scrub test done.\n");
	}

	dram_zones(&ctrl, 0);

	intel_early_me_init_done(ME_INIT_STATUS_SUCCESS);
	intel_early_me_status();

	report_memory_config();

	timestamp_add_now(TS_INITRAM_END);

	cbmem_was_inited = !cbmem_recovery(s3resume);
	if (!s3resume)
		save_timings(&ctrl);
	if (s3resume && !cbmem_was_inited) {
		/* Failed S3 resume, reset to come up cleanly */
		system_reset();
	}

	if (!s3resume)
		setup_sdram_meminfo(&ctrl);
}

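/* Entry point, called from the platform romstage; the CPUID ties cached training data to the installed CPU. */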
void perform_raminit(int s3resume)
{
	init_dram_ddr3(s3resume, cpu_get_cpuid());
}