/* SPDX-License-Identifier: GPL-2.0-only */

#include <console/console.h>
#include <cbfs.h>
#include <cbmem.h>
#include <cf9_reset.h>
#include <cpu/cpu.h>
#include <string.h>
#include <types.h>
#include <device/device.h>
#include <device/dram/ddr3.h>
#include <device/mmio.h>
#include <device/pci_ops.h>
#include <device/smbus_host.h>
#include <timestamp.h>
#include <mrc_cache.h>
#include <southbridge/intel/bd82x6x/me.h>
#include <southbridge/intel/bd82x6x/pch.h>
#include <cpu/x86/msr.h>

#include "raminit_common.h"
#include "sandybridge.h"
/* FIXME: no support for 3-channel chipsets */

static void wait_txt_clear(void)
{
	struct cpuid_result cp = cpuid_ext(1, 0);

	/* Check if TXT is supported */
	if (!(cp.ecx & (1 << 6)))
		return;

	/* Some TXT public bit */
	if (!(read32p(0xfed30010) & 1))
		return;

	/* Wait for TXT clear */
	while (!(read8p(0xfed40000) & (1 << 7)))
		;
}
/* Disable a channel in ramctr_timing */
static void disable_channel(ramctr_timing *ctrl, int channel)
{
	ctrl->rankmap[channel] = 0;

	memset(&ctrl->rank_mirror[channel][0], 0, sizeof(ctrl->rank_mirror[0]));

	ctrl->channel_size_mb[channel] = 0;
	ctrl->cmd_stretch[channel]     = 0;
	ctrl->mad_dimm[channel]        = 0;

	memset(&ctrl->timings[channel][0], 0, sizeof(ctrl->timings[0]));
	memset(&ctrl->info.dimm[channel][0], 0, sizeof(ctrl->info.dimm[0]));
}
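
/*
 * Helpers to decode platform memory capabilities (ECC availability, DIMMs per
 * channel, channel count, maximum capacity) from the host bridge CAPID0_A
 * register.
 */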
static uint8_t nb_get_ecc_type(const uint32_t capid0_a)
{
	return capid0_a & CAPID_ECCDIS ? MEMORY_ARRAY_ECC_NONE : MEMORY_ARRAY_ECC_SINGLE_BIT;
}

static uint16_t nb_slots_per_channel(const uint32_t capid0_a)
{
	return !(capid0_a & CAPID_DDPCD) + 1;
}

static uint16_t nb_number_of_channels(const uint32_t capid0_a)
{
	return !(capid0_a & CAPID_PDCD) + 1;
}

static uint32_t nb_max_chan_capacity_mib(const uint32_t capid0_a)
{
	uint32_t ddrsz;

	/* Values from documentation, which assume two DIMMs per channel */
	switch (CAPID_DDRSZ(capid0_a)) {
	case 1:
		ddrsz = 8192;
		break;
	case 2:
		ddrsz = 2048;
		break;
	case 3:
		ddrsz = 512;
		break;
	default:
		ddrsz = 16384;
		break;
	}

	/* Account for the maximum number of DIMMs per channel */
	return (ddrsz / 2) * nb_slots_per_channel(capid0_a);
}
/* Fill cbmem with information for SMBIOS type 16 and type 17 */
static void setup_sdram_meminfo(ramctr_timing *ctrl)
{
	int channel, slot;
	const u16 ddr_freq = (1000 << 8) / ctrl->tCK;

	FOR_ALL_CHANNELS for (slot = 0; slot < NUM_SLOTS; slot++) {
		enum cb_err ret = spd_add_smbios17(channel, slot, ddr_freq,
					&ctrl->info.dimm[channel][slot]);
		if (ret != CB_SUCCESS)
			printk(BIOS_ERR, "RAMINIT: Failed to add SMBIOS17\n");
	}

	/* The 'spd_add_smbios17' function allocates this CBMEM area */
	struct memory_info *m = cbmem_find(CBMEM_ID_MEMINFO);
	if (m == NULL)
		return;

	const uint32_t capid0_a = pci_read_config32(HOST_BRIDGE, CAPID0_A);

	const uint16_t channels = nb_number_of_channels(capid0_a);

	m->ecc_type = nb_get_ecc_type(capid0_a);
	m->max_capacity_mib = channels * nb_max_chan_capacity_mib(capid0_a);
	m->number_of_devices = channels * nb_slots_per_channel(capid0_a);
}
/* Return CRC16 match for all SPDs */
static int verify_crc16_spds_ddr3(spd_ddr3_raw_data *spd, ramctr_timing *ctrl)
{
	int channel, slot, spd_slot;
	int match = 1;

	FOR_ALL_CHANNELS {
		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;
			match &= ctrl->spd_crc[channel][slot] ==
				spd_ddr3_calc_unique_crc(spd[spd_slot], sizeof(spd_ddr3_raw_data));
		}
	}
	return match;
}
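
/*
 * Read a DIMM's SPD over SMBus. With id_only set, only the module ID and
 * serial number area is fetched, which should be enough for the CRC16
 * comparison above.
 */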
static void read_spd(spd_ddr3_raw_data *spd, u8 addr, bool id_only)
{
	int j;

	if (id_only) {
		for (j = SPD_DDR3_MOD_ID1; j < 128; j++)
			(*spd)[j] = smbus_read_byte(addr, j);
	} else {
		for (j = 0; j < SPD_SIZE_MAX_DDR3; j++)
			(*spd)[j] = smbus_read_byte(addr, j);
	}
}
static void mainboard_get_spd(spd_ddr3_raw_data *spd, bool id_only)
{
	const struct northbridge_intel_sandybridge_config *cfg = config_of_soc();
	unsigned int i;

	if (CONFIG(HAVE_SPD_IN_CBFS)) {
		struct spd_info spdi = {0};

		mb_get_spd_map(&spdi);

		size_t spd_file_len = 0;
		uint8_t *spd_file = cbfs_map("spd.bin", &spd_file_len);

		printk(BIOS_DEBUG, "SPD index %d\n", spdi.spd_index);

		/* SPD file sanity check */
		if (!spd_file)
			die("SPD data %s!", "not found");

		if (spd_file_len < ((spdi.spd_index + 1) * SPD_SIZE_MAX_DDR3))
			die("SPD data %s!", "incomplete");

		/*
		 * Copy SPD data specified by spd_info.spd_index to all slots marked as
		 * SPD_MEMORY_DOWN.
		 *
		 * Read SPD data from slots with a real SMBus address.
		 */
		for (i = 0; i < ARRAY_SIZE(spdi.addresses); i++) {
			if (spdi.addresses[i] == SPD_MEMORY_DOWN)
				memcpy(&spd[i], spd_file + (spdi.spd_index * SPD_SIZE_MAX_DDR3),
				       SPD_SIZE_MAX_DDR3);
			else if (spdi.addresses[i] != 0)
				read_spd(&spd[i], spdi.addresses[i], id_only);
		}
	} else {
		for (i = 0; i < ARRAY_SIZE(cfg->spd_addresses); i++) {
			if (cfg->spd_addresses[i] != 0)
				read_spd(&spd[i], cfg->spd_addresses[i], id_only);
		}
	} /* CONFIG(HAVE_SPD_IN_CBFS) */
}
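
/*
 * Decode the SPDs (preferring a valid XMP profile where one applies), fill the
 * per-channel fields of ramctr_timing and decide whether ECC can be used.
 */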
static void dram_find_spds_ddr3(spd_ddr3_raw_data *spd, ramctr_timing *ctrl)
{
	int dimms = 0, ch_dimms;
	int channel, slot, spd_slot;
	bool can_use_ecc = ctrl->ecc_supported;

	memset(ctrl->rankmap, 0, sizeof(ctrl->rankmap));

	ctrl->extended_temperature_range = 1;
	ctrl->auto_self_refresh = 1;

	FOR_ALL_CHANNELS {
		ctrl->channel_size_mb[channel] = 0;

		ch_dimms = 0;
		/* Count dimms on channel */
		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;

			if (spd[spd_slot][SPD_MEMORY_TYPE] == SPD_MEMORY_TYPE_SDRAM_DDR3)
				ch_dimms++;
		}

		for (slot = 0; slot < NUM_SLOTS; slot++) {
			spd_slot = 2 * channel + slot;
			printk(BIOS_DEBUG, "SPD probe channel%d, slot%d\n", channel, slot);

			struct dimm_attr_ddr3_st *const dimm = &ctrl->info.dimm[channel][slot];

			/* Search for XMP profile */
			spd_xmp_decode_ddr3(dimm, spd[spd_slot], DDR3_XMP_PROFILE_1);

			if (dimm->dram_type != SPD_MEMORY_TYPE_SDRAM_DDR3) {
				printram("No valid XMP profile found.\n");
				spd_decode_ddr3(dimm, spd[spd_slot]);

			} else if (ch_dimms > dimm->dimms_per_channel) {
				printram("XMP profile supports %u DIMMs, but %u DIMMs are installed.\n",
					 dimm->dimms_per_channel, ch_dimms);

				if (CONFIG(NATIVE_RAMINIT_IGNORE_XMP_MAX_DIMMS))
					printk(BIOS_WARNING,
					       "XMP maximum DIMMs will be ignored.\n");
				else
					spd_decode_ddr3(dimm, spd[spd_slot]);

			} else if (dimm->voltage != 1500) {
				/* TODO: Support DDR3 voltages other than 1500mV */
				printram("XMP profile's requested %u mV is unsupported.\n",
					 dimm->voltage);

				if (CONFIG(NATIVE_RAMINIT_IGNORE_XMP_REQUESTED_VOLTAGE))
					printk(BIOS_WARNING,
					       "XMP requested voltage will be ignored.\n");
				else
					spd_decode_ddr3(dimm, spd[spd_slot]);
			}

			/* Fill in CRC16 for MRC cache */
			ctrl->spd_crc[channel][slot] =
				spd_ddr3_calc_unique_crc(spd[spd_slot], sizeof(spd_ddr3_raw_data));

			if (dimm->dram_type != SPD_MEMORY_TYPE_SDRAM_DDR3) {
				/* Mark DIMM as invalid */
				dimm->ranks   = 0;
				dimm->size_mb = 0;
				continue;
			}

			dram_print_spd_ddr3(dimm);
			dimms++;

			ctrl->rank_mirror[channel][slot * 2] = 0;
			ctrl->rank_mirror[channel][slot * 2 + 1] = dimm->flags.pins_mirrored;

			ctrl->channel_size_mb[channel] += dimm->size_mb;

			if (!dimm->flags.is_ecc)
				can_use_ecc = false;

			ctrl->auto_self_refresh &= dimm->flags.asr;

			ctrl->extended_temperature_range &= dimm->flags.ext_temp_refresh;

			ctrl->rankmap[channel] |= ((1 << dimm->ranks) - 1) << (2 * slot);

			printk(BIOS_DEBUG, "channel[%d] rankmap = 0x%x\n", channel,
			       ctrl->rankmap[channel]);
		}

		const u8 rc_0 = ctrl->info.dimm[channel][0].reference_card;
		const u8 rc_1 = ctrl->info.dimm[channel][1].reference_card;

		/* Pick a per-channel offset based on the raw card types of both DIMMs */
		if (ch_dimms == NUM_SLOTS && rc_0 < 6 && rc_1 < 6) {
			const int ref_card_offset_table[6][6] = {
				{ 0, 0, 0, 0, 2, 2 },
				{ 0, 0, 0, 0, 2, 2 },
				{ 0, 0, 0, 0, 2, 2 },
				{ 0, 0, 0, 0, 1, 1 },
				{ 2, 2, 2, 1, 0, 0 },
				{ 2, 2, 2, 1, 0, 0 },
			};
			ctrl->ref_card_offset[channel] = ref_card_offset_table[rc_0][rc_1];
		} else {
			ctrl->ref_card_offset[channel] = 0;
		}
	}

	if (ctrl->ecc_forced || CONFIG(RAMINIT_ENABLE_ECC))
		ctrl->ecc_enabled = can_use_ecc;
	if (ctrl->ecc_forced && !ctrl->ecc_enabled)
		die("ECC mode forced but non-ECC DIMM installed!");
	printk(BIOS_DEBUG, "ECC is %s\n", ctrl->ecc_enabled ? "enabled" : "disabled");

	ctrl->lanes = ctrl->ecc_enabled ? 9 : 8;

	if (!dimms)
		die("No DIMMs were found");
}
static void save_timings(ramctr_timing *ctrl)
{
	/* Save the MRC S3 restore data to cbmem */
	mrc_cache_stash_data(MRC_TRAINING_DATA, MRC_CACHE_VERSION, ctrl, sizeof(*ctrl));
}
static void reinit_ctrl(ramctr_timing *ctrl, const u32 cpuid)
{
	/* Reset internal state */
	memset(ctrl, 0, sizeof(*ctrl));

	/* Get architecture */
	ctrl->cpu = cpuid;

	/* Get ECC support and mode */
	ctrl->ecc_forced = get_host_ecc_forced();
	ctrl->ecc_supported = ctrl->ecc_forced || get_host_ecc_cap();
	printk(BIOS_DEBUG, "ECC supported: %s ECC forced: %s\n",
	       ctrl->ecc_supported ? "yes" : "no",
	       ctrl->ecc_forced ? "yes" : "no");
}
static void init_dram_ddr3(int s3resume, const u32 cpuid)
{
	int me_uma_size, cbmem_was_inited, fast_boot, err;
	ramctr_timing ctrl;
	spd_ddr3_raw_data spds[4];
	size_t mrc_size;
	ramctr_timing *ctrl_cached = NULL;

	timestamp_add_now(TS_INITRAM_START);

	mchbar_setbits32(SAPMCTL, 1 << 0);

	/* Wait for ME to be ready */
	intel_early_me_init();
	me_uma_size = intel_early_me_uma_size();

	printk(BIOS_DEBUG, "Starting native Platform init\n");

	wait_txt_clear();

	wrmsr(0x2e6, (msr_t) { .lo = 0, .hi = 0 });

	const u32 sskpd = mchbar_read32(SSKPD);	/* !!! = 0x00000000 */
	if ((pci_read_config16(SOUTHBRIDGE, 0xa2) & 0xa0) == 0x20 && sskpd && !s3resume) {
		mchbar_write32(SSKPD, 0);
		/* Need reset */
		system_reset();
	}

	early_pch_init_native();

	early_thermal_init();

	/* Try to find timings in MRC cache */
	ctrl_cached = mrc_cache_current_mmap_leak(MRC_TRAINING_DATA,
						  MRC_CACHE_VERSION,
						  &mrc_size);
	if (mrc_size < sizeof(ctrl))
		ctrl_cached = NULL;

	/* Before reusing training data, assert that the CPU has not been replaced */
	if (ctrl_cached && cpuid != ctrl_cached->cpu) {

		/* It is not really worrying on a cold boot, but fatal when resuming from S3 */
		printk(s3resume ? BIOS_ALERT : BIOS_NOTICE,
		       "CPUID %x differs from stored CPUID %x, CPU was replaced!\n",
		       cpuid, ctrl_cached->cpu);

		/* Invalidate the stored data, it likely does not apply to the current CPU */
		ctrl_cached = NULL;
	}

	if (s3resume && !ctrl_cached) {
		/* S3 resume is impossible, reset to come up cleanly */
		system_reset();
	}

	/* Verify MRC cache for fast boot */
	if (!s3resume && ctrl_cached) {
		/* Load SPD unique information data. */
		memset(spds, 0, sizeof(spds));
		mainboard_get_spd(spds, 1);

		/* check SPD CRC16 to make sure the DIMMs haven't been replaced */
		fast_boot = verify_crc16_spds_ddr3(spds, ctrl_cached);
		if (!fast_boot)
			printk(BIOS_DEBUG, "Stored timings CRC16 mismatch.\n");
	} else {
		fast_boot = s3resume;
	}

	if (fast_boot) {
		printk(BIOS_DEBUG, "Trying stored timings.\n");
		memcpy(&ctrl, ctrl_cached, sizeof(ctrl));

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
		if (err) {
			if (s3resume) {
				/* Failed S3 resume, reset to come up cleanly */
				system_reset();
			}
			/* No need to erase bad MRC cache here, it gets overwritten on a
			   successful boot */
			printk(BIOS_ERR, "Stored timings are invalid !\n");
			fast_boot = 0;
		}
	}
	if (!fast_boot) {
		/* Reset internal state */
		reinit_ctrl(&ctrl, cpuid);

		printk(BIOS_INFO, "ECC RAM %s.\n", ctrl.ecc_forced ? "required" :
			ctrl.ecc_supported ? "supported" : "unsupported");

		/* Get DDR3 SPD data */
		memset(spds, 0, sizeof(spds));
		mainboard_get_spd(spds, 0);
		dram_find_spds_ddr3(spds, &ctrl);

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
	}

	if (err) {
		/* Fallback: disable failing channel */
		printk(BIOS_ERR, "RAM training failed, trying fallback.\n");
		printram("Disable failing channel.\n");

		/* Reset internal state */
		reinit_ctrl(&ctrl, cpuid);

		/* Reset DDR3 frequency */
		dram_find_spds_ddr3(spds, &ctrl);

		/* Disable failing channel */
		disable_channel(&ctrl, GET_ERR_CHANNEL(err));

		err = try_init_dram_ddr3(&ctrl, fast_boot, s3resume, me_uma_size);
	}

	if (err)
		die("raminit failed");

	/* FIXME: should be hardware revision-dependent. The register only exists on IVB. */
	mchbar_write32(CHANNEL_HASH, 0x00a030ce);

	set_scrambling_seed(&ctrl);

	if (!s3resume && ctrl.ecc_enabled)
		channel_scrub(&ctrl);

	set_normal_operation(&ctrl);

	final_registers(&ctrl);

	/* can't do this earlier because it needs to be done in normal operation */
	if (CONFIG(DEBUG_RAM_SETUP) && !s3resume && ctrl.ecc_enabled) {
		uint32_t i, tseg = pci_read_config32(HOST_BRIDGE, TSEGMB);

		printk(BIOS_INFO, "RAMINIT: ECC scrub test on first channel up to 0x%x\n",
		       tseg);

		/*
		 * This test helps to debug the ECC scrubbing.
		 * It likely tests every channel/rank, as rank interleave and enhanced
		 * interleave are enabled, but there's no guarantee for it.
		 */

		/* Skip first MB to avoid special case for A-seg and test up to TSEG */
		for (i = 1; i < tseg >> 20; i++) {
			for (int j = 0; j < 1 * MiB; j += 4096) {
				uintptr_t addr = i * MiB + j;
				if (read32((u32 *)addr) == 0)
					continue;

				printk(BIOS_ERR, "RAMINIT: ECC scrub: DRAM not cleared at"
				       " addr 0x%lx\n", addr);
				break;
			}
		}
		printk(BIOS_INFO, "RAMINIT: ECC scrub test done.\n");
	}

	dram_zones(&ctrl, 0);

	intel_early_me_init_done(ME_INIT_STATUS_SUCCESS);
	intel_early_me_status();

	report_memory_config();

	timestamp_add_now(TS_INITRAM_END);

	cbmem_was_inited = !cbmem_recovery(s3resume);
	if (!fast_boot)
		save_timings(&ctrl);
	if (s3resume && !cbmem_was_inited) {
		/* Failed S3 resume, reset to come up cleanly */
		system_reset();
	}

	if (!s3resume)
		setup_sdram_meminfo(&ctrl);
}
void perform_raminit(int s3resume)
{
	init_dram_ddr3(s3resume, cpu_get_cpuid());
}