1 /* SPDX-License-Identifier: GPL-2.0-only */
3 #include <commonlib/helpers.h>
6 #include <device/mmio.h>
7 #include <device/pci_ops.h>
8 #include <device/pci_def.h>
9 #include <device/device.h>
10 #include <device/smbus_host.h>
12 #include <console/console.h>
15 #include <timestamp.h>
19 static const gmch_gfx_t gmch_gfx_types
[][5] = {
20 /* MAX_667MHz MAX_533MHz MAX_400MHz MAX_333MHz MAX_800MHz */
21 { GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
},
22 { GMCH_GM47
, GMCH_GM45
, GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_GM49
},
23 { GMCH_GE45
, GMCH_GE45
, GMCH_GE45
, GMCH_GE45
, GMCH_GE45
},
24 { GMCH_UNKNOWN
, GMCH_GL43
, GMCH_GL40
, GMCH_UNKNOWN
, GMCH_UNKNOWN
},
25 { GMCH_UNKNOWN
, GMCH_GS45
, GMCH_GS40
, GMCH_UNKNOWN
, GMCH_UNKNOWN
},
26 { GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
},
27 { GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
, GMCH_UNKNOWN
},
28 { GMCH_PM45
, GMCH_PM45
, GMCH_PM45
, GMCH_PM45
, GMCH_PM45
},
31 void get_gmch_info(sysinfo_t
*sysinfo
)
33 sysinfo
->stepping
= pci_read_config8(PCI_DEV(0, 0, 0), PCI_CLASS_REVISION
);
34 if ((sysinfo
->stepping
> STEPPING_B3
) &&
35 (sysinfo
->stepping
!= STEPPING_CONVERSION_A1
))
36 die("Unknown stepping.\n");
37 if (sysinfo
->stepping
<= STEPPING_B3
)
38 printk(BIOS_DEBUG
, "Stepping %c%d\n", 'A' + sysinfo
->stepping
/ 4, sysinfo
->stepping
% 4);
40 printk(BIOS_DEBUG
, "Conversion stepping A1\n");
42 const u32 eax
= cpuid_ext(0x04, 0).eax
;
43 sysinfo
->cores
= ((eax
>> 26) & 0x3f) + 1;
44 printk(BIOS_SPEW
, "%d CPU cores\n", sysinfo
->cores
);
46 u32 capid
= pci_read_config16(PCI_DEV(0, 0, 0), D0F0_CAPID0
+8);
47 if (!(capid
& (1<<(79-64)))) {
48 printk(BIOS_SPEW
, "iTPM enabled\n");
51 capid
= pci_read_config32(PCI_DEV(0, 0, 0), D0F0_CAPID0
+4);
52 if (!(capid
& (1<<(57-32)))) {
53 printk(BIOS_SPEW
, "ME enabled\n");
56 if (!(capid
& (1<<(56-32)))) {
57 printk(BIOS_SPEW
, "AMT enabled\n");
60 sysinfo
->max_ddr2_mt
= (capid
& (1<<(53-32)))?667:800;
61 printk(BIOS_SPEW
, "capable of DDR2 of %d MHz or lower\n", sysinfo
->max_ddr2_mt
);
63 if (!(capid
& (1<<(48-32)))) {
64 printk(BIOS_SPEW
, "VT-d enabled\n");
67 const u32 gfx_variant
= (capid
>>(42-32)) & 0x7;
68 const u32 render_freq
= ((capid
>>(50-32) & 0x1) << 2) | ((capid
>>(35-32)) & 0x3);
70 sysinfo
->gfx_type
= gmch_gfx_types
[gfx_variant
][render_freq
];
72 sysinfo
->gfx_type
= GMCH_UNKNOWN
;
73 switch (sysinfo
->gfx_type
) {
75 printk(BIOS_SPEW
, "GMCH: GM45\n");
78 printk(BIOS_SPEW
, "GMCH: GM47\n");
81 printk(BIOS_SPEW
, "GMCH: GM49\n");
84 printk(BIOS_SPEW
, "GMCH: GE45\n");
87 printk(BIOS_SPEW
, "GMCH: GL40\n");
90 printk(BIOS_SPEW
, "GMCH: GL43\n");
93 printk(BIOS_SPEW
, "GMCH: GS40\n");
96 printk(BIOS_SPEW
, "GMCH: GS45, using %s-power mode\n",
97 sysinfo
->gs45_low_power_mode
? "low" : "high");
100 printk(BIOS_SPEW
, "GMCH: PM45\n");
103 printk(BIOS_SPEW
, "unknown GMCH\n");
107 sysinfo
->txt_enabled
= !(capid
& (1 << (37-32)));
108 if (sysinfo
->txt_enabled
) {
109 printk(BIOS_SPEW
, "TXT enabled\n");
112 switch (render_freq
) {
114 sysinfo
->max_render_mhz
= 800;
117 sysinfo
->max_render_mhz
= 667;
120 sysinfo
->max_render_mhz
= 533;
123 sysinfo
->max_render_mhz
= 400;
126 sysinfo
->max_render_mhz
= 333;
129 printk(BIOS_SPEW
, "Unknown render frequency\n");
130 sysinfo
->max_render_mhz
= 0;
133 if (sysinfo
->max_render_mhz
!= 0) {
134 printk(BIOS_SPEW
, "Render frequency: %d MHz\n", sysinfo
->max_render_mhz
);
137 if (!(capid
& (1<<(33-32)))) {
138 printk(BIOS_SPEW
, "IGD enabled\n");
141 if (!(capid
& (1<<(32-32)))) {
142 printk(BIOS_SPEW
, "PCIe-to-GMCH enabled\n");
145 capid
= pci_read_config32(PCI_DEV(0, 0, 0), D0F0_CAPID0
);
147 u32 ddr_cap
= capid
>>30 & 0x3;
150 sysinfo
->max_ddr3_mt
= 1067;
153 sysinfo
->max_ddr3_mt
= 800;
157 printk(BIOS_SPEW
, "GMCH not DDR3 capable\n");
158 sysinfo
->max_ddr3_mt
= 0;
161 if (sysinfo
->max_ddr3_mt
!= 0) {
162 printk(BIOS_SPEW
, "GMCH supports DDR3 with %d MT or less\n", sysinfo
->max_ddr3_mt
);
165 const unsigned int max_fsb
= (capid
>> 28) & 0x3;
168 sysinfo
->max_fsb_mhz
= 1067;
171 sysinfo
->max_fsb_mhz
= 800;
174 sysinfo
->max_fsb_mhz
= 667;
177 die("unknown FSB capability\n");
180 if (sysinfo
->max_fsb_mhz
!= 0) {
181 printk(BIOS_SPEW
, "GMCH supports FSB with up to %d MHz\n", sysinfo
->max_fsb_mhz
);
183 sysinfo
->max_fsb
= max_fsb
- 1;
187 * Detect if the system went through an interrupted RAM init or is incon-
188 * sistent. If so, initiate a cold reboot. Otherwise mark the system to be
189 * in RAM init, so this function would detect it on an erroneous reboot.
191 void enter_raminit_or_reset(void)
193 /* Interrupted RAM init or inconsistent system? */
194 u8 reg8
= pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa2);
196 if (reg8
& (1 << 2)) { /* S4-assertion-width violation */
197 /* Ignore S4-assertion-width violation like original BIOS. */
198 printk(BIOS_WARNING
, "Ignoring S4-assertion-width violation.\n");
199 /* Bit2 is R/WC, so it will clear itself below. */
202 if (reg8
& (1 << 7)) { /* interrupted RAM init */
203 /* Don't enable S4-assertion stretch. Makes trouble on roda/rk9.
204 reg8 = pci_read_config8(PCI_DEV(0, 0x1f, 0), 0xa4);
205 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa4, reg8 | 0x08);
209 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, reg8
& ~(1 << 7));
211 printk(BIOS_INFO
, "Interrupted RAM init, reset required.\n");
214 /* Mark system to be in RAM init. */
215 pci_write_config8(PCI_DEV(0, 0x1f, 0), 0xa2, reg8
| (1 << 7));
218 /* For a detected DIMM, test the value of an SPD byte to
219 match the expected value after masking some bits. */
220 static int test_dimm(sysinfo_t
*const sysinfo
,
221 int dimm
, int addr
, int bitmask
, int expected
)
223 return (smbus_read_byte(sysinfo
->spd_map
[dimm
], addr
) & bitmask
) == expected
;
226 /* This function dies if dimm is unsuitable for the chipset. */
227 static void verify_ddr2_dimm(sysinfo_t
*const sysinfo
, int dimm
)
229 if (!test_dimm(sysinfo
, dimm
, 20, 0x04, 0x04))
230 die("Chipset only supports SO-DIMM\n");
232 if (!test_dimm(sysinfo
, dimm
, 6, 0xff, 0x40) ||
233 !test_dimm(sysinfo
, dimm
, 11, 0xff, 0x00))
234 die("Chipset doesn't support ECC RAM\n");
236 if (!test_dimm(sysinfo
, dimm
, 5, 0x07, 0) &&
237 !test_dimm(sysinfo
, dimm
, 5, 0x07, 1))
238 die("Chipset wants single or dual ranked DIMMs\n");
241 * Generally supports:
244 * 10 column address bits
245 * 13, 14 or 15 (x8 only) row address bits
247 * FIXME: There seems to be an exception for 256Gb x16 chips. Not
248 * covered by the numbers above (9 column address bits?).
250 if (!test_dimm(sysinfo
, dimm
, 13, 0xff, 8) &&
251 !test_dimm(sysinfo
, dimm
, 13, 0xff, 16))
252 die("Chipset requires x8 or x16 width\n");
254 if (!test_dimm(sysinfo
, dimm
, 17, 0xff, 4) &&
255 !test_dimm(sysinfo
, dimm
, 17, 0xff, 8))
256 die("Chipset requires 4 or 8 banks\n");
258 if (!test_dimm(sysinfo
, dimm
, 4, 0xff, 10))
259 die("Chipset requires 10 column address bits\n");
261 if (!test_dimm(sysinfo
, dimm
, 3, 0xff, 13) &&
262 !test_dimm(sysinfo
, dimm
, 3, 0xff, 14) &&
263 !(test_dimm(sysinfo
, dimm
, 3, 0xff, 15) &&
264 test_dimm(sysinfo
, dimm
, 13, 0xff, 8)))
265 die("Chipset requires 13, 14 or 15 (with x8) row address bits");
268 /* For every detected DIMM, test if it's suitable for the chipset. */
269 static void verify_ddr2(sysinfo_t
*const sysinfo
, int mask
)
272 for (cur
= 0; mask
; mask
>>= 1, ++cur
) {
274 verify_ddr2_dimm(sysinfo
, cur
);
278 /* This function dies if dimm is unsuitable for the chipset. */
279 static void verify_ddr3_dimm(sysinfo_t
*const sysinfo
, int dimm
)
281 if (!test_dimm(sysinfo
, dimm
, 3, 15, 3))
282 die("Chipset only supports SO-DIMM\n");
284 if (!test_dimm(sysinfo
, dimm
, 8, 0x18, 0))
285 die("Chipset doesn't support ECC RAM\n");
287 if (!test_dimm(sysinfo
, dimm
, 7, 0x38, 0) &&
288 !test_dimm(sysinfo
, dimm
, 7, 0x38, 8))
289 die("Chipset wants single or double sided DIMMs\n");
291 if (!test_dimm(sysinfo
, dimm
, 7, 7, 1) &&
292 !test_dimm(sysinfo
, dimm
, 7, 7, 2))
293 die("Chipset requires x8 or x16 width\n");
295 if (!test_dimm(sysinfo
, dimm
, 4, 0x0f, 0) &&
296 !test_dimm(sysinfo
, dimm
, 4, 0x0f, 1) &&
297 !test_dimm(sysinfo
, dimm
, 4, 0x0f, 2) &&
298 !test_dimm(sysinfo
, dimm
, 4, 0x0f, 3))
299 die("Chipset requires 256Mb, 512Mb, 1Gb or 2Gb chips.");
301 if (!test_dimm(sysinfo
, dimm
, 4, 0x70, 0))
302 die("Chipset requires 8 banks on DDR3\n");
304 /* How to check if burst length is 8?
305 Other values are not supported, are they even possible? */
307 if (!test_dimm(sysinfo
, dimm
, 10, 0xff, 1))
308 die("Code assumes 1/8ns MTB\n");
310 if (!test_dimm(sysinfo
, dimm
, 11, 0xff, 8))
311 die("Code assumes 1/8ns MTB\n");
313 if (!test_dimm(sysinfo
, dimm
, 62, 0x9f, 0) &&
314 !test_dimm(sysinfo
, dimm
, 62, 0x9f, 1) &&
315 !test_dimm(sysinfo
, dimm
, 62, 0x9f, 2) &&
316 !test_dimm(sysinfo
, dimm
, 62, 0x9f, 3) &&
317 !test_dimm(sysinfo
, dimm
, 62, 0x9f, 5))
318 die("Only raw card types A, B, C, D and F are supported.\n");
321 /* For every detected DIMM, test if it's suitable for the chipset. */
322 static void verify_ddr3(sysinfo_t
*const sysinfo
, int mask
)
327 verify_ddr3_dimm(sysinfo
, cur
);
336 struct spd_dimminfo
{
339 unsigned int chip_capacity
;
342 unsigned int cas_latencies
;
350 unsigned int page_size
;
351 unsigned int raw_card
;
352 unsigned int refresh
;
356 * \brief Decode SPD tck cycle time
358 * Decodes a raw SPD data from a DDR2 DIMM.
359 * Returns cycle time in 1/256th ns.
361 static unsigned int spd_decode_tck_time(u8 c
)
382 die("Invalid tck setting. lower nibble is 0x%x\n", c
& 0xf);
384 low
= (c
& 0xf) * 10;
387 return ((high
* 100 + low
) << 8) / 100;
389 static void collect_ddr2_dimm(struct spd_dimminfo
*const di
, const int smb_addr
)
391 static const int tCK_offsets
[] = { 9, 23, 25 };
393 di
->rows
= smbus_read_byte(smb_addr
, 3);
394 di
->cols
= smbus_read_byte(smb_addr
, 4);
395 di
->banks
= smbus_read_byte(smb_addr
, 17);
396 di
->width
= smbus_read_byte(smb_addr
, 13) / 8; /* in bytes */
398 /* 0: 256Mb .. 3: 2Gb */
401 + (di
->width
== 1 ? 3 : 4) /* 1B: 2^3 bits, 2B: 2^4 bits */
402 + (di
->banks
== 4 ? 2 : 3) /* 4 banks: 2^2, 8 banks: 2^3 */
405 di
->page_size
= di
->width
* (1 << di
->cols
); /* in bytes */
407 di
->ranks
= (smbus_read_byte(smb_addr
, 5) & 7) + 1;
409 di
->cas_latencies
= smbus_read_byte(smb_addr
, 18);
410 /* assuming tCKmin for the highest CAS is the absolute minimum */
411 di
->tCKmin
= spd_decode_tck_time(smbus_read_byte(smb_addr
, 9));
413 /* try to reconstruct tAAmin from available data (I hate DDR2 SPDs) */
415 unsigned int cas
= 7;
416 di
->tAAmin
= UINT32_MAX
; /* we don't have UINT_MAX? */
417 for (i
= 0; i
< ARRAY_SIZE(tCK_offsets
); ++i
, --cas
) {
418 for (; cas
> 1; --cas
)
419 if (di
->cas_latencies
& (1 << cas
))
424 const unsigned int tCK_enc
=
425 smbus_read_byte(smb_addr
, tCK_offsets
[i
]);
426 const unsigned int tAA
= spd_decode_tck_time(tCK_enc
) * cas
;
427 if (tAA
< di
->tAAmin
)
431 /* convert to 1/256ns */
432 di
->tRAS
= smbus_read_byte(smb_addr
, 30) << 8; /* given in ns */
433 di
->tRP
= smbus_read_byte(smb_addr
, 27) << 6; /* given in 1/4ns */
434 di
->tRCD
= smbus_read_byte(smb_addr
, 29) << 6; /* given in 1/4ns */
435 di
->tWR
= smbus_read_byte(smb_addr
, 36) << 6; /* given in 1/4ns */
437 di
->raw_card
= 0; /* Use same path as for DDR3 type A. */
438 di
->refresh
= smbus_read_byte(smb_addr
, 12);
441 * This function collects RAM characteristics from SPD, assuming that RAM
442 * is generally within chipset's requirements, since verify_ddr2() passed.
444 static void collect_ddr2(sysinfo_t
*const sysinfo
, spdinfo_t
*const config
)
447 for (cur
= 0; cur
< 2; ++cur
) {
448 if (config
->dimm_mask
& (1 << (2 * cur
))) {
449 collect_ddr2_dimm(&config
->channel
[cur
],
450 sysinfo
->spd_map
[2 * cur
]);
456 * This function collects RAM characteristics from SPD, assuming that RAM
457 * is generally within chipset's requirements, since verify_ddr3() passed.
459 static void collect_ddr3(sysinfo_t
*const sysinfo
, spdinfo_t
*const config
)
461 int mask
= config
->dimm_mask
;
464 /* FIXME: support several dimms on same channel. */
465 if ((mask
& 1) && sysinfo
->spd_map
[2 * cur
]) {
467 const int smb_addr
= sysinfo
->spd_map
[2 * cur
];
469 config
->channel
[cur
].rows
= ((smbus_read_byte(smb_addr
, 5) >> 3) & 7) + 12;
470 config
->channel
[cur
].cols
= (smbus_read_byte(smb_addr
, 5) & 7) + 9;
472 config
->channel
[cur
].chip_capacity
= smbus_read_byte(smb_addr
, 4) & 0xf;
474 config
->channel
[cur
].banks
= 8; /* GM45 only accepts this for DDR3.
475 verify_ddr3() fails for other values. */
476 config
->channel
[cur
].ranks
= ((smbus_read_byte(smb_addr
, 7) >> 3) & 7) + 1;
478 config
->channel
[cur
].cas_latencies
=
479 ((smbus_read_byte(smb_addr
, 15) << 8) | smbus_read_byte(smb_addr
, 14))
480 << 4; /* so bit x is CAS x */
481 config
->channel
[cur
].tAAmin
= smbus_read_byte(smb_addr
, 16) * 32; /* convert from MTB to 1/256 ns */
482 config
->channel
[cur
].tCKmin
= smbus_read_byte(smb_addr
, 12) * 32; /* convert from MTB to 1/256 ns */
484 config
->channel
[cur
].width
= smbus_read_byte(smb_addr
, 7) & 7;
485 config
->channel
[cur
].page_size
= config
->channel
[cur
].width
*
486 (1 << config
->channel
[cur
].cols
); /* in Bytes */
488 tmp
= smbus_read_byte(smb_addr
, 21);
489 config
->channel
[cur
].tRAS
= (smbus_read_byte(smb_addr
, 22) | ((tmp
& 0xf) << 8)) * 32;
490 config
->channel
[cur
].tRP
= smbus_read_byte(smb_addr
, 20) * 32;
491 config
->channel
[cur
].tRCD
= smbus_read_byte(smb_addr
, 18) * 32;
492 config
->channel
[cur
].tWR
= smbus_read_byte(smb_addr
, 17) * 32;
494 config
->channel
[cur
].raw_card
= smbus_read_byte(smb_addr
, 62) & 0x1f;
495 config
->channel
[cur
].refresh
= REFRESH_7_8
;
502 static fsb_clock_t
read_fsb_clock(void)
504 switch (mchbar_read32(CLKCFG_MCHBAR
) & CLKCFG_FSBCLK_MASK
) {
506 return FSB_CLOCK_1067MHz
;
508 return FSB_CLOCK_800MHz
;
510 return FSB_CLOCK_667MHz
;
512 die("Unsupported FSB clock.\n");
515 static mem_clock_t
clock_index(const unsigned int clock
)
518 case 533: return MEM_CLOCK_533MHz
;
519 case 400: return MEM_CLOCK_400MHz
;
520 case 333: return MEM_CLOCK_333MHz
;
521 default: die("Unknown clock value.\n");
523 return -1; /* Won't be reached. */
525 static void normalize_clock(unsigned int *const clock
)
529 else if (*clock
>= 400)
531 else if (*clock
>= 333)
536 static void lower_clock(unsigned int *const clock
)
539 normalize_clock(clock
);
541 static unsigned int find_common_clock_cas(sysinfo_t
*const sysinfo
,
542 const spdinfo_t
*const spdinfo
)
544 /* various constraints must be fulfilled:
545 CAS * tCK < 20ns == 160MTB
546 tCK_max >= tCK >= tCK_min
547 CAS >= roundup(tAA_min/tCK)
549 Clock(MHz) = 1000 / tCK(ns)
550 Clock(MHz) = 8000 / tCK(MTB)
551 AND BTW: Clock(MT) = 2000 / tCK(ns) - intel uses MTs but calls them MHz
555 /* Calculate common cas_latencies mask, tCKmin and tAAmin. */
556 unsigned int cas_latencies
= (unsigned int)-1;
557 unsigned int tCKmin
= 0, tAAmin
= 0;
558 FOR_EACH_POPULATED_CHANNEL(sysinfo
->dimms
, i
) {
559 cas_latencies
&= spdinfo
->channel
[i
].cas_latencies
;
560 if (spdinfo
->channel
[i
].tCKmin
> tCKmin
)
561 tCKmin
= spdinfo
->channel
[i
].tCKmin
;
562 if (spdinfo
->channel
[i
].tAAmin
> tAAmin
)
563 tAAmin
= spdinfo
->channel
[i
].tAAmin
;
566 /* Get actual value of fsb clock. */
567 sysinfo
->selected_timings
.fsb_clock
= read_fsb_clock();
568 unsigned int fsb_mhz
= 0;
569 switch (sysinfo
->selected_timings
.fsb_clock
) {
570 case FSB_CLOCK_1067MHz
: fsb_mhz
= 1067; break;
571 case FSB_CLOCK_800MHz
: fsb_mhz
= 800; break;
572 case FSB_CLOCK_667MHz
: fsb_mhz
= 667; break;
575 unsigned int clock
= 256000 / tCKmin
;
576 const unsigned int max_ddr_clock
= (sysinfo
->spd_type
== DDR2
)
577 ? sysinfo
->max_ddr2_mt
/ 2
578 : sysinfo
->max_ddr3_mt
/ 2;
579 if ((clock
> max_ddr_clock
) || (clock
> fsb_mhz
/ 2)) {
580 int new_clock
= MIN(max_ddr_clock
, fsb_mhz
/ 2);
581 printk(BIOS_INFO
, "DIMMs support %d MHz, but chipset only runs at up to %d. Limiting...\n",
585 normalize_clock(&clock
);
587 /* Find compatible clock / CAS pair. */
588 unsigned int tCKproposed
;
592 die("Couldn't find compatible clock / CAS settings.\n");
593 tCKproposed
= 256000 / clock
;
594 CAS
= DIV_ROUND_UP(tAAmin
, tCKproposed
);
595 printk(BIOS_SPEW
, "Trying CAS %u, tCK %u.\n", CAS
, tCKproposed
);
596 for (; CAS
<= DDR3_MAX_CAS
; ++CAS
)
597 if (cas_latencies
& (1 << CAS
))
599 if ((CAS
<= DDR3_MAX_CAS
) && (CAS
* tCKproposed
< 32 * 160)) {
600 /* Found good CAS. */
601 printk(BIOS_SPEW
, "Found compatible clock / CAS pair: %u / %u.\n", clock
, CAS
);
606 sysinfo
->selected_timings
.CAS
= CAS
;
607 sysinfo
->selected_timings
.mem_clock
= clock_index(clock
);
612 static void calculate_derived_timings(sysinfo_t
*const sysinfo
,
613 const unsigned int tCLK
,
614 const spdinfo_t
*const spdinfo
)
618 /* Calculate common tRASmin, tRPmin, tRCDmin and tWRmin. */
619 unsigned int tRASmin
= 0, tRPmin
= 0, tRCDmin
= 0, tWRmin
= 0;
620 FOR_EACH_POPULATED_CHANNEL(sysinfo
->dimms
, i
) {
621 if (spdinfo
->channel
[i
].tRAS
> tRASmin
)
622 tRASmin
= spdinfo
->channel
[i
].tRAS
;
623 if (spdinfo
->channel
[i
].tRP
> tRPmin
)
624 tRPmin
= spdinfo
->channel
[i
].tRP
;
625 if (spdinfo
->channel
[i
].tRCD
> tRCDmin
)
626 tRCDmin
= spdinfo
->channel
[i
].tRCD
;
627 if (spdinfo
->channel
[i
].tWR
> tWRmin
)
628 tWRmin
= spdinfo
->channel
[i
].tWR
;
630 tRASmin
= DIV_ROUND_UP(tRASmin
, tCLK
);
631 tRPmin
= DIV_ROUND_UP(tRPmin
, tCLK
);
632 tRCDmin
= DIV_ROUND_UP(tRCDmin
, tCLK
);
633 tWRmin
= DIV_ROUND_UP(tWRmin
, tCLK
);
635 /* Lookup tRFC and calculate common tRFCmin. */
636 const unsigned int tRFC_from_clock_and_cap
[][4] = {
637 /* CAP_256M CAP_512M CAP_1G CAP_2G */
638 /* 533MHz */ { 40, 56, 68, 104 },
639 /* 400MHz */ { 30, 42, 51, 78 },
640 /* 333MHz */ { 25, 35, 43, 65 },
642 unsigned int tRFCmin
= 0;
643 FOR_EACH_POPULATED_CHANNEL(sysinfo
->dimms
, i
) {
644 const unsigned int tRFC
= tRFC_from_clock_and_cap
645 [sysinfo
->selected_timings
.mem_clock
][spdinfo
->channel
[i
].chip_capacity
];
650 /* Calculate common tRD from CAS and FSB and DRAM clocks. */
651 unsigned int tRDmin
= sysinfo
->selected_timings
.CAS
;
652 switch (sysinfo
->selected_timings
.fsb_clock
) {
653 case FSB_CLOCK_667MHz
:
656 case FSB_CLOCK_800MHz
:
659 case FSB_CLOCK_1067MHz
:
661 if (sysinfo
->selected_timings
.mem_clock
== MEM_CLOCK_1067MT
)
666 /* Calculate common tRRDmin. */
667 unsigned int tRRDmin
= 0;
668 FOR_EACH_POPULATED_CHANNEL(sysinfo
->dimms
, i
) {
669 unsigned int tRRD
= 2 + (spdinfo
->channel
[i
].page_size
/ 1024);
670 if (sysinfo
->selected_timings
.mem_clock
== MEM_CLOCK_1067MT
)
671 tRRD
+= (spdinfo
->channel
[i
].page_size
/ 1024);
676 /* Lookup and calculate common tFAWmin. */
677 unsigned int tFAW_from_pagesize_and_clock
[][3] = {
678 /* 533MHz 400MHz 333MHz */
679 /* 1K */ { 20, 15, 13 },
680 /* 2K */ { 27, 20, 17 },
682 unsigned int tFAWmin
= 0;
683 FOR_EACH_POPULATED_CHANNEL(sysinfo
->dimms
, i
) {
684 const unsigned int tFAW
= tFAW_from_pagesize_and_clock
685 [spdinfo
->channel
[i
].page_size
/ 1024 - 1]
686 [sysinfo
->selected_timings
.mem_clock
];
691 /* Refresh rate is fixed. */
693 if (sysinfo
->spd_type
== DDR2
) {
694 tWL
= sysinfo
->selected_timings
.CAS
- 1;
695 } else if (sysinfo
->selected_timings
.mem_clock
== MEM_CLOCK_1067MT
) {
701 printk(BIOS_SPEW
, "Timing values:\n"
712 tCLK
, tRASmin
, tRPmin
, tRCDmin
, tRFCmin
, tWRmin
, tRDmin
, tRRDmin
, tFAWmin
, tWL
);
714 sysinfo
->selected_timings
.tRAS
= tRASmin
;
715 sysinfo
->selected_timings
.tRP
= tRPmin
;
716 sysinfo
->selected_timings
.tRCD
= tRCDmin
;
717 sysinfo
->selected_timings
.tRFC
= tRFCmin
;
718 sysinfo
->selected_timings
.tWR
= tWRmin
;
719 sysinfo
->selected_timings
.tRD
= tRDmin
;
720 sysinfo
->selected_timings
.tRRD
= tRRDmin
;
721 sysinfo
->selected_timings
.tFAW
= tFAWmin
;
722 sysinfo
->selected_timings
.tWL
= tWL
;
725 static void collect_dimm_config(sysinfo_t
*const sysinfo
)
730 spdinfo
.dimm_mask
= 0;
731 sysinfo
->spd_type
= 0;
733 for (i
= 0; i
< 4; i
++)
734 if (sysinfo
->spd_map
[i
]) {
735 const u8 spd
= smbus_read_byte(sysinfo
->spd_map
[i
], 2);
736 printk(BIOS_DEBUG
, "%x:%x:%x\n",
737 i
, sysinfo
->spd_map
[i
],
739 if ((spd
== 7) || (spd
== 8) || (spd
== 0xb)) {
740 spdinfo
.dimm_mask
|= 1 << i
;
741 if (sysinfo
->spd_type
&& sysinfo
->spd_type
!= spd
) {
742 die("Multiple types of DIMM installed in the system, don't do that!\n");
744 sysinfo
->spd_type
= spd
;
747 if (spdinfo
.dimm_mask
== 0) {
748 die("Could not find any DIMM.\n");
751 /* Normalize spd_type to 1, 2, 3. */
752 sysinfo
->spd_type
= (sysinfo
->spd_type
& 1) | ((sysinfo
->spd_type
& 8) >> 2);
753 printk(BIOS_SPEW
, "DDR mask %x, DDR %d\n", spdinfo
.dimm_mask
, sysinfo
->spd_type
);
755 if (sysinfo
->spd_type
== DDR2
) {
756 verify_ddr2(sysinfo
, spdinfo
.dimm_mask
);
757 collect_ddr2(sysinfo
, &spdinfo
);
758 } else if (sysinfo
->spd_type
== DDR3
) {
759 verify_ddr3(sysinfo
, spdinfo
.dimm_mask
);
760 collect_ddr3(sysinfo
, &spdinfo
);
762 die("Will never support DDR1.\n");
765 for (i
= 0; i
< 2; i
++) {
766 if ((spdinfo
.dimm_mask
>> (i
*2)) & 1) {
767 printk(BIOS_SPEW
, "Bank %d populated:\n"
768 " Raw card type: %4c\n"
769 " Row addr bits: %4u\n"
770 " Col addr bits: %4u\n"
777 " Max clock: %3u MHz\n"
779 i
, spdinfo
.channel
[i
].raw_card
+ 'A',
780 spdinfo
.channel
[i
].rows
, spdinfo
.channel
[i
].cols
,
781 spdinfo
.channel
[i
].width
, spdinfo
.channel
[i
].page_size
,
782 spdinfo
.channel
[i
].banks
, spdinfo
.channel
[i
].ranks
,
783 spdinfo
.channel
[i
].tAAmin
, spdinfo
.channel
[i
].tCKmin
,
784 256000 / spdinfo
.channel
[i
].tCKmin
, spdinfo
.channel
[i
].cas_latencies
);
788 FOR_EACH_CHANNEL(i
) {
789 sysinfo
->dimms
[i
].card_type
=
790 (spdinfo
.dimm_mask
& (1 << (i
* 2))) ? spdinfo
.channel
[i
].raw_card
+ 0xa : 0;
791 sysinfo
->dimms
[i
].refresh
= spdinfo
.channel
[i
].refresh
;
794 /* Find common memory clock and CAS. */
795 const unsigned int tCLK
= find_common_clock_cas(sysinfo
, &spdinfo
);
797 /* Calculate other timings from clock and CAS. */
798 calculate_derived_timings(sysinfo
, tCLK
, &spdinfo
);
800 /* Initialize DIMM infos. */
801 /* Always prefer interleaved over async channel mode. */
802 FOR_EACH_CHANNEL(i
) {
803 IF_CHANNEL_POPULATED(sysinfo
->dimms
, i
) {
804 sysinfo
->dimms
[i
].banks
= spdinfo
.channel
[i
].banks
;
805 sysinfo
->dimms
[i
].ranks
= spdinfo
.channel
[i
].ranks
;
807 /* .width is 1 for x8 or 2 for x16, bus width is 8 bytes. */
808 const unsigned int chips_per_rank
= 8 / spdinfo
.channel
[i
].width
;
810 sysinfo
->dimms
[i
].chip_width
= spdinfo
.channel
[i
].width
;
811 sysinfo
->dimms
[i
].chip_capacity
= spdinfo
.channel
[i
].chip_capacity
;
812 sysinfo
->dimms
[i
].page_size
= spdinfo
.channel
[i
].page_size
* chips_per_rank
;
813 sysinfo
->dimms
[i
].rank_capacity_mb
=
814 /* offset of chip_capacity is 8 (256M), therefore, add 8
815 chip_capacity is in Mbit, we want MByte, therefore, subtract 3 */
816 (1 << (spdinfo
.channel
[i
].chip_capacity
+ 8 - 3)) * chips_per_rank
;
819 if (CHANNEL_IS_POPULATED(sysinfo
->dimms
, 0) &&
820 CHANNEL_IS_POPULATED(sysinfo
->dimms
, 1))
821 sysinfo
->selected_timings
.channel_mode
= CHANNEL_MODE_DUAL_INTERLEAVED
;
823 sysinfo
->selected_timings
.channel_mode
= CHANNEL_MODE_SINGLE
;
826 static void reset_on_bad_warmboot(void)
828 /* Check self refresh channel status. */
829 const u32 reg
= mchbar_read32(PMSTS_MCHBAR
);
830 /* Clear status bits. R/WC */
831 mchbar_write32(PMSTS_MCHBAR
, reg
);
832 if ((reg
& PMSTS_WARM_RESET
) && !(reg
& PMSTS_BOTH_SELFREFRESH
)) {
833 printk(BIOS_INFO
, "DRAM was not in self refresh "
834 "during warm boot, reset required.\n");
839 static void set_system_memory_frequency(const timings_t
*const timings
)
841 mchbar_clrbits16(CLKCFG_MCHBAR
+ 0x60, 1 << 15);
842 mchbar_clrbits16(CLKCFG_MCHBAR
+ 0x48, 1 << 15);
844 /* Calculate wanted frequency setting. */
845 const int want_freq
= 6 - timings
->mem_clock
;
847 /* Read current memory frequency. */
848 const u32 clkcfg
= mchbar_read32(CLKCFG_MCHBAR
);
849 int cur_freq
= (clkcfg
& CLKCFG_MEMCLK_MASK
) >> CLKCFG_MEMCLK_SHIFT
;
851 /* Try memory frequency from scratchpad. */
852 printk(BIOS_DEBUG
, "Reading current memory frequency from scratchpad.\n");
853 cur_freq
= (mchbar_read16(SSKPD_MCHBAR
) & SSKPD_CLK_MASK
) >> SSKPD_CLK_SHIFT
;
856 if (cur_freq
!= want_freq
) {
857 printk(BIOS_DEBUG
, "Changing memory frequency: old %x, new %x.\n", cur_freq
, want_freq
);
858 /* When writing new frequency setting, reset, then set update bit. */
859 mchbar_clrsetbits32(CLKCFG_MCHBAR
, CLKCFG_UPDATE
| CLKCFG_MEMCLK_MASK
,
860 want_freq
<< CLKCFG_MEMCLK_SHIFT
);
861 mchbar_clrsetbits32(CLKCFG_MCHBAR
, CLKCFG_MEMCLK_MASK
,
862 want_freq
<< CLKCFG_MEMCLK_SHIFT
| CLKCFG_UPDATE
);
863 /* Reset update bit. */
864 mchbar_clrbits32(CLKCFG_MCHBAR
, CLKCFG_UPDATE
);
867 if ((timings
->fsb_clock
== FSB_CLOCK_1067MHz
) && (timings
->mem_clock
== MEM_CLOCK_667MT
)) {
868 mchbar_write32(CLKCFG_MCHBAR
+ 0x16, 0x000030f0);
869 mchbar_write32(CLKCFG_MCHBAR
+ 0x64, 0x000050c1);
871 mchbar_clrsetbits32(CLKCFG_MCHBAR
, 1 << 12, 1 << 17);
872 mchbar_setbits32(CLKCFG_MCHBAR
, 1 << 17 | 1 << 12);
873 mchbar_clrbits32(CLKCFG_MCHBAR
, 1 << 12);
875 mchbar_write32(CLKCFG_MCHBAR
+ 0x04, 0x9bad1f1f);
876 mchbar_write8(CLKCFG_MCHBAR
+ 0x08, 0xf4);
877 mchbar_write8(CLKCFG_MCHBAR
+ 0x0a, 0x43);
878 mchbar_write8(CLKCFG_MCHBAR
+ 0x0c, 0x10);
879 mchbar_write8(CLKCFG_MCHBAR
+ 0x0d, 0x80);
880 mchbar_write32(CLKCFG_MCHBAR
+ 0x50, 0x0b0e151b);
881 mchbar_write8(CLKCFG_MCHBAR
+ 0x54, 0xb4);
882 mchbar_write8(CLKCFG_MCHBAR
+ 0x55, 0x10);
883 mchbar_write8(CLKCFG_MCHBAR
+ 0x56, 0x08);
885 mchbar_setbits32(CLKCFG_MCHBAR
, 1 << 10);
886 mchbar_setbits32(CLKCFG_MCHBAR
, 1 << 11);
887 mchbar_clrbits32(CLKCFG_MCHBAR
, 1 << 10);
888 mchbar_clrbits32(CLKCFG_MCHBAR
, 1 << 11);
891 mchbar_setbits32(CLKCFG_MCHBAR
+ 0x48, 0x3f << 24);
894 int raminit_read_vco_index(void)
896 switch (mchbar_read8(HPLLVCO_MCHBAR
) & 0x7) {
906 die("Unknown VCO frequency.\n");
910 static void set_igd_memory_frequencies(const sysinfo_t
*const sysinfo
)
912 const int gfx_idx
= ((sysinfo
->gfx_type
== GMCH_GS45
) &&
913 !sysinfo
->gs45_low_power_mode
)
914 ? (GMCH_GS45
+ 1) : sysinfo
->gfx_type
;
916 /* Render and sampler frequency values seem to be some kind of factor. */
917 const u16 render_freq_from_vco_and_gfxtype
[][10] = {
918 /* GM45 GM47 GM49 GE45 GL40 GL43 GS40 GS45 (perf) */
919 /* VCO 2666 */ { 0xd, 0xd, 0xe, 0xd, 0xb, 0xd, 0xb, 0xa, 0xd },
920 /* VCO 3200 */ { 0xd, 0xe, 0xf, 0xd, 0xb, 0xd, 0xb, 0x9, 0xd },
921 /* VCO 4000 */ { 0xc, 0xd, 0xf, 0xc, 0xa, 0xc, 0xa, 0x9, 0xc },
922 /* VCO 5333 */ { 0xb, 0xc, 0xe, 0xb, 0x9, 0xb, 0x9, 0x8, 0xb },
924 const u16 sampler_freq_from_vco_and_gfxtype
[][10] = {
925 /* GM45 GM47 GM49 GE45 GL40 GL43 GS40 GS45 (perf) */
926 /* VCO 2666 */ { 0xc, 0xc, 0xd, 0xc, 0x9, 0xc, 0x9, 0x8, 0xc },
927 /* VCO 3200 */ { 0xc, 0xd, 0xe, 0xc, 0x9, 0xc, 0x9, 0x8, 0xc },
928 /* VCO 4000 */ { 0xa, 0xc, 0xd, 0xa, 0x8, 0xa, 0x8, 0x8, 0xa },
929 /* VCO 5333 */ { 0xa, 0xa, 0xc, 0xa, 0x7, 0xa, 0x7, 0x6, 0xa },
931 const u16 display_clock_select_from_gfxtype
[] = {
932 /* GM45 GM47 GM49 GE45 GL40 GL43 GS40 GS45 (perf) */
933 1, 1, 1, 1, 1, 1, 1, 0, 1
936 if (pci_read_config16(GCFGC_PCIDEV
, 0) != 0x8086) {
937 printk(BIOS_DEBUG
, "Skipping IGD memory frequency setting.\n");
941 mchbar_write16(0x119e, 0xa800);
942 mchbar_clrsetbits16(0x11c0, 0xff << 8, 0x01 << 8);
943 mchbar_write16(0x119e, 0xb800);
944 mchbar_setbits8(0x0f10, 1 << 7);
947 const int vco_idx
= raminit_read_vco_index();
948 printk(BIOS_DEBUG
, "Setting IGD memory frequencies for VCO #%d.\n", vco_idx
);
951 ((render_freq_from_vco_and_gfxtype
[vco_idx
][gfx_idx
]
952 << GCFGC_CR_SHIFT
) & GCFGC_CR_MASK
) |
953 ((sampler_freq_from_vco_and_gfxtype
[vco_idx
][gfx_idx
]
954 << GCFGC_CS_SHIFT
) & GCFGC_CS_MASK
);
956 /* Set frequencies, clear update bit. */
957 u32 gcfgc
= pci_read_config16(GCFGC_PCIDEV
, GCFGC_OFFSET
);
958 gcfgc
&= ~(GCFGC_CS_MASK
| GCFGC_UPDATE
| GCFGC_CR_MASK
);
960 pci_write_config16(GCFGC_PCIDEV
, GCFGC_OFFSET
, gcfgc
);
962 /* Set frequencies, set update bit. */
963 gcfgc
= pci_read_config16(GCFGC_PCIDEV
, GCFGC_OFFSET
);
964 gcfgc
&= ~(GCFGC_CS_MASK
| GCFGC_CR_MASK
);
965 gcfgc
|= freqcfg
| GCFGC_UPDATE
;
966 pci_write_config16(GCFGC_PCIDEV
, GCFGC_OFFSET
, gcfgc
);
968 /* Clear update bit. */
969 pci_and_config16(GCFGC_PCIDEV
, GCFGC_OFFSET
, ~GCFGC_UPDATE
);
971 /* Set display clock select bit. */
972 pci_write_config16(GCFGC_PCIDEV
, GCFGC_OFFSET
,
973 (pci_read_config16(GCFGC_PCIDEV
, GCFGC_OFFSET
) & ~GCFGC_CD_MASK
) |
974 (display_clock_select_from_gfxtype
[gfx_idx
] << GCFGC_CD_SHIFT
));
977 static void configure_dram_control_mode(const timings_t
*const timings
, const dimminfo_t
*const dimms
)
981 FOR_EACH_CHANNEL(ch
) {
982 unsigned int mchbar
= CxDRC0_MCHBAR(ch
);
983 u32 cxdrc
= mchbar_read32(mchbar
);
984 cxdrc
&= ~CxDRC0_RANKEN_MASK
;
985 FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms
, ch
, r
)
986 cxdrc
|= CxDRC0_RANKEN(r
);
987 if (dimms
[ch
].refresh
== REFRESH_3_9
)
988 cxdrc
= (cxdrc
& ~CxDRC0_RMS_MASK
) | CxDRC0_RMS_39US
;
990 cxdrc
= (cxdrc
& ~CxDRC0_RMS_MASK
) | CxDRC0_RMS_78US
;
991 mchbar_write32(mchbar
, cxdrc
);
993 mchbar
= CxDRC1_MCHBAR(ch
);
994 cxdrc
= mchbar_read32(mchbar
);
995 cxdrc
|= CxDRC1_NOTPOP_MASK
;
996 FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms
, ch
, r
)
997 cxdrc
&= ~CxDRC1_NOTPOP(r
);
998 cxdrc
|= CxDRC1_MUSTWR
;
999 mchbar_write32(mchbar
, cxdrc
);
1001 mchbar
= CxDRC2_MCHBAR(ch
);
1002 cxdrc
= mchbar_read32(mchbar
);
1003 cxdrc
|= CxDRC2_NOTPOP_MASK
;
1004 FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms
, ch
, r
)
1005 cxdrc
&= ~CxDRC2_NOTPOP(r
);
1006 cxdrc
|= CxDRC2_MUSTWR
;
1007 if (timings
->mem_clock
== MEM_CLOCK_1067MT
)
1008 cxdrc
|= CxDRC2_CLK1067MT
;
1009 mchbar_write32(mchbar
, cxdrc
);
1013 static void rcomp_initialization(const int spd_type
, const stepping_t stepping
, const int sff
)
1015 /* Program RCOMP codes. */
1017 die("SFF platform unsupported in RCOMP initialization.\n");
1019 if (spd_type
== DDR2
) {
1021 for (o
= 0; o
<= 0x200; o
+= 0x40) {
1022 mchbar_clrsetbits8(0x6ac + o
, 0x0f, 0x0a);
1023 mchbar_write8(0x6b0 + o
, 0x55);
1025 /* ODT multiplier bits. */
1026 mchbar_clrsetbits32(0x04d0, 7 << 3 | 7 << 0, 1 << 3 | 1 << 0);
1028 /* Values are for DDR3. */
1029 mchbar_clrbits8(0x6ac, 0x0f);
1030 mchbar_write8(0x6b0, 0x55);
1031 mchbar_clrbits8(0x6ec, 0x0f);
1032 mchbar_write8(0x6f0, 0x66);
1033 mchbar_clrbits8(0x72c, 0x0f);
1034 mchbar_write8(0x730, 0x66);
1035 mchbar_clrbits8(0x76c, 0x0f);
1036 mchbar_write8(0x770, 0x66);
1037 mchbar_clrbits8(0x7ac, 0x0f);
1038 mchbar_write8(0x7b0, 0x66);
1039 mchbar_clrbits8(0x7ec, 0x0f);
1040 mchbar_write8(0x7f0, 0x66);
1041 mchbar_clrbits8(0x86c, 0x0f);
1042 mchbar_write8(0x870, 0x55);
1043 mchbar_clrbits8(0x8ac, 0x0f);
1044 mchbar_write8(0x8b0, 0x66);
1045 /* ODT multiplier bits. */
1046 mchbar_clrsetbits32(0x04d0, 7 << 3 | 7 << 0, 2 << 3 | 2 << 0);
1049 /* Perform RCOMP calibration for DDR3. */
1050 raminit_rcomp_calibration(stepping
);
1052 /* Run initial RCOMP. */
1053 mchbar_setbits32(0x418, 1 << 17);
1054 mchbar_clrbits32(0x40c, 1 << 23);
1055 mchbar_clrbits32(0x41c, 1 << 7 | 1 << 3);
1056 mchbar_setbits32(0x400, 1);
1057 while (mchbar_read32(0x400) & 1) {}
1059 /* Run second RCOMP. */
1060 mchbar_setbits32(0x40c, 1 << 19);
1061 mchbar_setbits32(0x400, 1);
1062 while (mchbar_read32(0x400) & 1) {}
1064 /* Cleanup and start periodic RCOMP. */
1065 mchbar_clrbits32(0x40c, 1 << 19);
1066 mchbar_setbits32(0x40c, 1 << 23);
1067 mchbar_clrbits32(0x418, 1 << 17);
1068 mchbar_setbits32(0x41c, 1 << 7 | 1 << 3);
1069 mchbar_setbits32(0x400, 1 << 1);
1072 static void dram_powerup(const int spd_type
, const int stepping
, const int resume
)
1077 tmp
= mchbar_read32(CLKCFG_MCHBAR
);
1078 tmp
&= ~(3 << 21 | 1 << 3);
1079 if (spd_type
== DDR2
&& stepping
< STEPPING_B0
)
1080 tmp
|= 2 << 21 | 1 << 3;
1083 mchbar_write32(CLKCFG_MCHBAR
, tmp
);
1085 if (spd_type
== DDR3
&& !resume
) {
1086 mchbar_setbits32(0x1434, 1 << 10);
1089 mchbar_setbits32(0x1434, 1 << 6);
1090 if (spd_type
== DDR3
&& !resume
) {
1092 mchbar_setbits32(0x1434, 1 << 9);
1093 mchbar_clrbits32(0x1434, 1 << 10);
1098 static void dram_program_timings(const int spd_type
, const timings_t
*const timings
)
1100 /* Values are for DDR3. */
1101 const int burst_length
= 8;
1102 const int tWTR
= (spd_type
== DDR2
) ? 3 : 4, tRTP
= 1;
1105 FOR_EACH_CHANNEL(i
) {
1106 u32 reg
= mchbar_read32(CxDRT0_MCHBAR(i
));
1107 const int btb_wtp
= timings
->tWL
+ burst_length
/2 + timings
->tWR
;
1109 ((spd_type
== DDR2
) ? timings
->CAS
- 1 : timings
->tWL
)
1110 + burst_length
/2 + tWTR
;
1111 reg
= (reg
& ~(CxDRT0_BtB_WtP_MASK
| CxDRT0_BtB_WtR_MASK
)) |
1112 ((btb_wtp
<< CxDRT0_BtB_WtP_SHIFT
) & CxDRT0_BtB_WtP_MASK
) |
1113 ((btb_wtr
<< CxDRT0_BtB_WtR_SHIFT
) & CxDRT0_BtB_WtR_MASK
);
1114 if (spd_type
== DDR2
) {
1115 reg
= (reg
& ~(0x7 << 15)) | (2 << 15);
1116 if (timings
->mem_clock
== MEM_CLOCK_667MT
)
1117 reg
= (reg
& ~(0xf << 10)) | (2 << 10);
1119 reg
= (reg
& ~(0xf << 10)) | (3 << 10);
1120 reg
= (reg
& ~(0x7 << 5)) | (3 << 5);
1121 } else if (timings
->mem_clock
!= MEM_CLOCK_1067MT
) {
1122 reg
= (reg
& ~(0x7 << 15)) | ((9 - timings
->CAS
) << 15);
1123 reg
= (reg
& ~(0xf << 10)) | ((timings
->CAS
- 3) << 10);
1124 reg
= (reg
& ~(0x7 << 5)) | (3 << 5);
1126 reg
= (reg
& ~(0x7 << 15)) | ((10 - timings
->CAS
) << 15);
1127 reg
= (reg
& ~(0xf << 10)) | ((timings
->CAS
- 4) << 10);
1128 reg
= (reg
& ~(0x7 << 5)) | (3 << 5);
1130 reg
= (reg
& ~(0x7 << 0)) | (1 << 0);
1131 mchbar_write32(CxDRT0_MCHBAR(i
), reg
);
1133 reg
= mchbar_read32(CxDRT1_MCHBAR(i
));
1134 reg
= (reg
& ~(0x03 << 28)) | ((tRTP
& 0x03) << 28);
1135 reg
= (reg
& ~(0x1f << 21)) | ((timings
->tRAS
& 0x1f) << 21);
1136 reg
= (reg
& ~(0x07 << 10)) | (((timings
->tRRD
- 2) & 0x07) << 10);
1137 reg
= (reg
& ~(0x07 << 5)) | (((timings
->tRCD
- 2) & 0x07) << 5);
1138 reg
= (reg
& ~(0x07 << 0)) | (((timings
->tRP
- 2) & 0x07) << 0);
1139 mchbar_write32(CxDRT1_MCHBAR(i
), reg
);
1141 reg
= mchbar_read32(CxDRT2_MCHBAR(i
));
1142 reg
= (reg
& ~(0x1f << 17)) | ((timings
->tFAW
& 0x1f) << 17);
1143 if (spd_type
== DDR2
) {
1144 reg
= (reg
& ~(0x7 << 12)) | (0x1 << 12);
1145 reg
= (reg
& ~(0xf << 6)) | (0x1 << 6);
1146 } else if (timings
->mem_clock
!= MEM_CLOCK_1067MT
) {
1147 reg
= (reg
& ~(0x7 << 12)) | (0x2 << 12);
1148 reg
= (reg
& ~(0xf << 6)) | (0x9 << 6);
1150 reg
= (reg
& ~(0x7 << 12)) | (0x3 << 12);
1151 reg
= (reg
& ~(0xf << 6)) | (0xc << 6);
1153 reg
= (reg
& ~(0x1f << 0)) | (0x13 << 0);
1154 mchbar_write32(CxDRT2_MCHBAR(i
), reg
);
1156 reg
= mchbar_read32(CxDRT3_MCHBAR(i
));
1157 if (spd_type
== DDR2
)
1158 reg
&= ~(0x3 << 28);
1161 reg
= (reg
& ~(0x03 << 26));
1162 reg
= (reg
& ~(0x07 << 23)) | (((timings
->CAS
- 3) & 0x07) << 23);
1163 reg
= (reg
& ~(0xff << 13)) | ((timings
->tRFC
& 0xff) << 13);
1164 reg
= (reg
& ~(0x07 << 0)) | (((timings
->tWL
- 2) & 0x07) << 0);
1165 mchbar_write32(CxDRT3_MCHBAR(i
), reg
);
1167 reg
= mchbar_read32(CxDRT4_MCHBAR(i
));
1168 static const u8 timings_by_clock
[4][3] = {
1169 /* 333MHz 400MHz 533MHz
1170 667MT 800MT 1067MT */
1171 { 0x07, 0x0a, 0x0d },
1172 { 0x3a, 0x46, 0x5d },
1173 { 0x0c, 0x0e, 0x18 },
1174 { 0x21, 0x28, 0x35 },
1176 const int clk_idx
= 2 - timings
->mem_clock
;
1177 reg
= (reg
& ~(0x01f << 27)) | (timings_by_clock
[0][clk_idx
] << 27);
1178 reg
= (reg
& ~(0x3ff << 17)) | (timings_by_clock
[1][clk_idx
] << 17);
1179 reg
= (reg
& ~(0x03f << 10)) | (timings_by_clock
[2][clk_idx
] << 10);
1180 reg
= (reg
& ~(0x1ff << 0)) | (timings_by_clock
[3][clk_idx
] << 0);
1181 mchbar_write32(CxDRT4_MCHBAR(i
), reg
);
1183 reg
= mchbar_read32(CxDRT5_MCHBAR(i
));
1184 if (timings
->mem_clock
== MEM_CLOCK_1067MT
)
1185 reg
= (reg
& ~(0xf << 28)) | (0x8 << 28);
1186 reg
= (reg
& ~(0x00f << 22)) | ((burst_length
/2 + timings
->CAS
+ 2) << 22);
1187 if (spd_type
== DDR2
) {
1188 if (timings
->mem_clock
== MEM_CLOCK_667MT
)
1189 reg
= (reg
& ~(0x1ff << 12)) | (0x21 << 12);
1191 reg
= (reg
& ~(0x1ff << 12)) | (0x28 << 12);
1193 reg
= (reg
& ~(0x1ff << 12)) | (0x190 << 12);
1195 reg
= (reg
& ~(0x00f << 4)) | ((timings
->CAS
- 2) << 4);
1196 reg
= (reg
& ~(0x003 << 2)) | (0x001 << 2);
1197 reg
= (reg
& ~(0x003 << 0));
1198 mchbar_write32(CxDRT5_MCHBAR(i
), reg
);
1200 reg
= mchbar_read32(CxDRT6_MCHBAR(i
));
1201 if (spd_type
== DDR2
) {
1204 reg
= (reg
& ~(0xffff << 16)) | (0x066a << 16); /* always 7.8us refresh rate for DDR3 */
1207 mchbar_write32(CxDRT6_MCHBAR(i
), reg
);
1211 static void dram_program_banks(const dimminfo_t
*const dimms
)
1215 FOR_EACH_CHANNEL(ch
) {
1216 const int tRPALL
= dimms
[ch
].banks
== 8;
1218 u32 reg
= mchbar_read32(CxDRT1_MCHBAR(ch
)) & ~(0x01 << 15);
1219 IF_CHANNEL_POPULATED(dimms
, ch
)
1220 reg
|= tRPALL
<< 15;
1221 mchbar_write32(CxDRT1_MCHBAR(ch
), reg
);
1223 reg
= mchbar_read32(CxDRA_MCHBAR(ch
)) & ~CxDRA_BANKS_MASK
;
1224 FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms
, ch
, r
) {
1225 reg
|= CxDRA_BANKS(r
, dimms
[ch
].banks
);
1227 mchbar_write32(CxDRA_MCHBAR(ch
), reg
);
1231 static void ddr3_odt_setup(const timings_t
*const timings
, const int sff
)
1235 FOR_EACH_CHANNEL(ch
) {
1236 u32 reg
= mchbar_read32(CxODT_HIGH(ch
));
1237 if (sff
&& (timings
->mem_clock
!= MEM_CLOCK_1067MT
))
1238 reg
&= ~(0x3 << (61 - 32));
1240 reg
|= 0x3 << (61 - 32);
1241 reg
= (reg
& ~(0x3 << (52 - 32))) | (0x2 << (52 - 32));
1242 reg
= (reg
& ~(0x7 << (48 - 32))) | ((timings
->CAS
- 3) << (48 - 32));
1243 reg
= (reg
& ~(0xf << (44 - 32))) | (0x7 << (44 - 32));
1244 if (timings
->mem_clock
!= MEM_CLOCK_1067MT
) {
1245 reg
= (reg
& ~(0xf << (40 - 32))) | ((12 - timings
->CAS
) << (40 - 32));
1246 reg
= (reg
& ~(0xf << (36 - 32))) | (( 2 + timings
->CAS
) << (36 - 32));
1248 reg
= (reg
& ~(0xf << (40 - 32))) | ((13 - timings
->CAS
) << (40 - 32));
1249 reg
= (reg
& ~(0xf << (36 - 32))) | (( 1 + timings
->CAS
) << (36 - 32));
1251 reg
= (reg
& ~(0xf << (32 - 32))) | (0x7 << (32 - 32));
1252 mchbar_write32(CxODT_HIGH(ch
), reg
);
1254 reg
= mchbar_read32(CxODT_LOW(ch
));
1255 reg
= (reg
& ~(0x7 << 28)) | (0x2 << 28);
1256 reg
= (reg
& ~(0x3 << 22)) | (0x2 << 22);
1257 reg
= (reg
& ~(0x7 << 12)) | (0x2 << 12);
1258 reg
= (reg
& ~(0x7 << 4)) | (0x2 << 4);
1259 switch (timings
->mem_clock
) {
1260 case MEM_CLOCK_667MT
:
1263 case MEM_CLOCK_800MT
:
1264 reg
= (reg
& ~0x7) | 0x2;
1266 case MEM_CLOCK_1067MT
:
1267 reg
= (reg
& ~0x7) | 0x5;
1270 mchbar_write32(CxODT_LOW(ch
), reg
);
1274 static void ddr2_odt_setup(const timings_t
*const timings
, const int sff
)
1278 FOR_EACH_CHANNEL(ch
) {
1279 u32 reg
= mchbar_read32(CxODT_HIGH(ch
));
1280 if (sff
&& (timings
->mem_clock
== MEM_CLOCK_667MT
))
1281 reg
&= ~(0x3 << (61 - 32));
1283 reg
|= 0x3 << (61 - 32);
1284 reg
= (reg
& ~(0x3 << (52 - 32))) | (1 << (52 - 32));
1285 reg
= (reg
& ~(0x7 << (48 - 32))) | ((timings
->CAS
- 2) << (48 - 32));
1286 reg
= (reg
& ~(0xf << (44 - 32))) | (8 << (44 - 32));
1287 reg
= (reg
& ~(0xf << (40 - 32))) | (7 << (40 - 32));
1288 if (timings
->mem_clock
== MEM_CLOCK_667MT
) {
1289 reg
= (reg
& ~(0xf << (36 - 32))) | (4 << (36 - 32));
1290 reg
= (reg
& ~(0xf << (32 - 32))) | (4 << (32 - 32));
1292 reg
= (reg
& ~(0xf << (36 - 32))) | (5 << (36 - 32));
1293 reg
= (reg
& ~(0xf << (32 - 32))) | (5 << (32 - 32));
1295 mchbar_write32(CxODT_HIGH(ch
), reg
);
1297 reg
= mchbar_read32(CxODT_LOW(ch
));
1298 if (timings
->mem_clock
== MEM_CLOCK_667MT
)
1299 reg
= (reg
& ~(0x7 << 28)) | (2 << 28);
1301 reg
= (reg
& ~(0x7 << 28)) | (3 << 28);
1302 reg
= (reg
& ~(0x3 << 22)) | (1 << 22);
1303 if (timings
->mem_clock
== MEM_CLOCK_667MT
)
1304 reg
= (reg
& ~(0x7 << 12)) | ((timings
->tWL
- 1) << 12);
1306 reg
= (reg
& ~(0x7 << 12)) | ((timings
->tWL
- 2) << 12);
1307 reg
= (reg
& ~(0x7 << 4)) | ((timings
->tWL
- 1) << 4);
1308 reg
= (reg
& ~(0x7 << 0));
1309 mchbar_write32(CxODT_LOW(ch
), reg
);
1313 static void misc_settings(const timings_t
*const timings
,
1314 const stepping_t stepping
)
1316 mchbar_clrsetbits32(0x1260, 1 << 24 | 0x1f, timings
->tRD
);
1317 mchbar_clrsetbits32(0x1360, 1 << 24 | 0x1f, timings
->tRD
);
1319 mchbar_clrsetbits8(0x1268, 0xf, timings
->tWL
);
1320 mchbar_clrsetbits8(0x1368, 0xf, timings
->tWL
);
1321 mchbar_clrsetbits8(0x12a0, 0xf, 0xa);
1322 mchbar_clrsetbits8(0x13a0, 0xf, 0xa);
1324 mchbar_clrsetbits32(0x218, 7 << 29 | 7 << 25 | 3 << 22 | 3 << 10,
1325 4 << 29 | 3 << 25 | 0 << 22 | 1 << 10);
1326 mchbar_clrsetbits32(0x220, 7 << 16, 1 << 21 | 1 << 16);
1327 mchbar_clrsetbits32(0x224, 7 << 8, 3 << 8);
1328 if (stepping
>= STEPPING_B1
)
1329 mchbar_setbits8(0x234, 1 << 3);
1332 static void clock_crossing_setup(const fsb_clock_t fsb
,
1333 const mem_clock_t ddr3clock
,
1334 const dimminfo_t
*const dimms
)
1338 static const u32 values_from_fsb_and_mem
[][3][4] = {
1340 /* DDR3-1067 */ { 0x00000000, 0x00000000, 0x00180006, 0x00810060 },
1341 /* DDR3-800 */ { 0x00000000, 0x00000000, 0x0000001c, 0x000300e0 },
1342 /* DDR3-667 */ { 0x00000000, 0x00001c00, 0x03c00038, 0x0007e000 },
1345 /* DDR3-1067 */ { 0, 0, 0, 0 },
1346 /* DDR3-800 */ { 0x00000000, 0x00000000, 0x0030000c, 0x000300c0 },
1347 /* DDR3-667 */ { 0x00000000, 0x00000380, 0x0060001c, 0x00030c00 },
1350 /* DDR3-1067 */ { 0, 0, 0, 0 },
1351 /* DDR3-800 */ { 0, 0, 0, 0 },
1352 /* DDR3-667 */ { 0x00000000, 0x00000000, 0x0030000c, 0x000300c0 },
1356 const u32
*data
= values_from_fsb_and_mem
[fsb
][ddr3clock
];
1357 mchbar_write32(0x0208, data
[3]);
1358 mchbar_write32(0x020c, data
[2]);
1359 if (((fsb
== FSB_CLOCK_1067MHz
) || (fsb
== FSB_CLOCK_800MHz
)) && (ddr3clock
== MEM_CLOCK_667MT
))
1360 mchbar_write32(0x0210, data
[1]);
1362 static const u32 from_fsb_and_mem
[][3] = {
1363 /* DDR3-1067 DDR3-800 DDR3-667 */
1364 /* FSB 1067MHz */{ 0x40100401, 0x10040220, 0x08040110, },
1365 /* FSB 800MHz */{ 0x00000000, 0x40100401, 0x00080201, },
1366 /* FSB 667MHz */{ 0x00000000, 0x00000000, 0x40100401, },
1368 FOR_EACH_CHANNEL(ch
) {
1369 const unsigned int mchbar
= 0x1258 + (ch
* 0x0100);
1370 if ((fsb
== FSB_CLOCK_1067MHz
) && (ddr3clock
== MEM_CLOCK_800MT
) && CHANNEL_IS_CARDF(dimms
, ch
))
1371 mchbar_write32(mchbar
, 0x08040120);
1373 mchbar_write32(mchbar
, from_fsb_and_mem
[fsb
][ddr3clock
]);
1374 mchbar_write32(mchbar
+ 4, 0);
1378 /* Program egress VC1 isoch timings. */
1379 static void vc1_program_timings(const fsb_clock_t fsb
)
1381 const u32 timings_by_fsb
[][2] = {
1382 /* FSB 1067MHz */ { 0x1a, 0x01380138 },
1383 /* FSB 800MHz */ { 0x14, 0x00f000f0 },
1384 /* FSB 667MHz */ { 0x10, 0x00c000c0 },
1386 epbar_write8(EPVC1ITC
, timings_by_fsb
[fsb
][0]);
1387 epbar_write32(EPVC1IST
+ 0, timings_by_fsb
[fsb
][1]);
1388 epbar_write32(EPVC1IST
+ 4, timings_by_fsb
[fsb
][1]);
1391 #define DEFAULT_PCI_MMIO_SIZE 2048
1392 #define HOST_BRIDGE PCI_DEVFN(0, 0)
1394 static unsigned int get_mmio_size(void)
1396 const struct device
*dev
;
1397 const struct northbridge_intel_gm45_config
*cfg
= NULL
;
1399 dev
= pcidev_path_on_root(HOST_BRIDGE
);
1401 cfg
= dev
->chip_info
;
1403 /* If this is zero, it just means devicetree.cb didn't set it */
1404 if (!cfg
|| cfg
->pci_mmio_size
== 0)
1405 return DEFAULT_PCI_MMIO_SIZE
;
1407 return cfg
->pci_mmio_size
;
1410 /* @prejedec if not zero, set rank size to 128MB and page size to 4KB. */
1411 static void program_memory_map(const dimminfo_t
*const dimms
, const channel_mode_t mode
, const int prejedec
, u16 ggc
)
1415 /* Program rank boundaries (CxDRBy). */
1416 unsigned int base
= 0; /* start of next rank in MB */
1417 unsigned int total_mb
[2] = { 0, 0 }; /* total memory per channel in MB */
1418 FOR_EACH_CHANNEL(ch
) {
1419 if (mode
== CHANNEL_MODE_DUAL_INTERLEAVED
)
1420 /* In interleaved mode, start every channel from 0. */
1422 for (r
= 0; r
< RANKS_PER_CHANNEL
; r
+= 2) {
1423 /* Fixed capacity for pre-jedec config. */
1424 const unsigned int rank_capacity_mb
=
1425 prejedec
? 128 : dimms
[ch
].rank_capacity_mb
;
1428 /* Program bounds in CxDRBy. */
1429 IF_RANK_POPULATED(dimms
, ch
, r
) {
1430 base
+= rank_capacity_mb
;
1431 total_mb
[ch
] += rank_capacity_mb
;
1433 reg
|= CxDRBy_BOUND_MB(r
, base
);
1434 IF_RANK_POPULATED(dimms
, ch
, r
+1) {
1435 base
+= rank_capacity_mb
;
1436 total_mb
[ch
] += rank_capacity_mb
;
1438 reg
|= CxDRBy_BOUND_MB(r
+1, base
);
1440 mchbar_write32(CxDRBy_MCHBAR(ch
, r
), reg
);
1444 /* Program page size (CxDRA). */
1445 FOR_EACH_CHANNEL(ch
) {
1446 u32 reg
= mchbar_read32(CxDRA_MCHBAR(ch
)) & ~CxDRA_PAGESIZE_MASK
;
1447 FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms
, ch
, r
) {
1448 /* Fixed page size for pre-jedec config. */
1449 const unsigned int page_size
= /* dimm page size in bytes */
1450 prejedec
? 4096 : dimms
[ch
].page_size
;
1451 reg
|= CxDRA_PAGESIZE(r
, log2(page_size
));
1452 /* deferred to f5_27: reg |= CxDRA_BANKS(r, dimms[ch].banks); */
1454 mchbar_write32(CxDRA_MCHBAR(ch
), reg
);
1457 /* Calculate memory mapping, all values in MB. */
1462 printk(BIOS_DEBUG
, "IGD decoded, subtracting ");
1464 /* Graphics memory */
1465 const u32 gms_sizek
= decode_igd_memory_size((ggc
>> 4) & 0xf);
1466 printk(BIOS_DEBUG
, "%uM UMA", gms_sizek
>> 10);
1468 /* GTT Graphics Stolen Memory Size (GGMS) */
1469 const u32 gsm_sizek
= decode_igd_gtt_size((ggc
>> 8) & 0xf);
1470 printk(BIOS_DEBUG
, " and %uM GTT\n", gsm_sizek
>> 10);
1472 uma_sizem
= (gms_sizek
+ gsm_sizek
) >> 10;
1474 /* TSEG 2M, This amount can easily be covered by SMRR MTRR's,
1475 which requires to have TSEG_BASE aligned to TSEG_SIZE. */
1476 pci_update_config8(PCI_DEV(0, 0, 0), D0F0_ESMRAMC
, ~0x07, (1 << 1) | (1 << 0));
1480 const unsigned int mmio_size
= get_mmio_size();
1481 const unsigned int MMIOstart
= 4096 - mmio_size
+ uma_sizem
;
1482 const int me_active
= pci_read_config8(PCI_DEV(0, 3, 0), PCI_CLASS_REVISION
) != 0xff;
1483 const unsigned int ME_SIZE
= prejedec
|| !me_active
? 0 : 32;
1484 const unsigned int usedMEsize
= (total_mb
[0] != total_mb
[1]) ? ME_SIZE
: 2 * ME_SIZE
;
1485 const unsigned int claimCapable
=
1486 !(pci_read_config32(PCI_DEV(0, 0, 0), D0F0_CAPID0
+ 4) & (1 << (47 - 32)));
1488 const unsigned int TOM
= total_mb
[0] + total_mb
[1];
1489 unsigned int TOMminusME
= TOM
- usedMEsize
;
1490 unsigned int TOLUD
= (TOMminusME
< MMIOstart
) ? TOMminusME
: MMIOstart
;
1491 unsigned int TOUUD
= TOMminusME
;
1492 unsigned int REMAPbase
= 0xffff, REMAPlimit
= 0;
1494 if (claimCapable
&& (TOMminusME
>= (MMIOstart
+ 64))) {
1495 /* 64MB alignment: We'll lose some MBs here, if ME is on. */
1496 TOMminusME
&= ~(64 - 1);
1497 /* 64MB alignment: Loss will be reclaimed. */
1499 if (TOMminusME
> 4096) {
1500 REMAPbase
= TOMminusME
;
1501 REMAPlimit
= REMAPbase
+ (4096 - TOLUD
);
1504 REMAPlimit
= REMAPbase
+ (TOMminusME
- TOLUD
);
1507 /* REMAPlimit is an inclusive bound, all others exclusive. */
1511 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_TOM
, (TOM
>> 7) & 0x1ff);
1512 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_TOLUD
, TOLUD
<< 4);
1513 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_TOUUD
, TOUUD
);
1514 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_REMAPBASE
, (REMAPbase
>> 6) & 0x03ff);
1515 pci_write_config16(PCI_DEV(0, 0, 0), D0F0_REMAPLIMIT
, (REMAPlimit
>> 6) & 0x03ff);
1517 /* Program channel mode. */
1519 case CHANNEL_MODE_SINGLE
:
1520 printk(BIOS_DEBUG
, "Memory configured in single-channel mode.\n");
1521 mchbar_clrbits32(DCC_MCHBAR
, DCC_INTERLEAVED
);
1523 case CHANNEL_MODE_DUAL_ASYNC
:
1524 printk(BIOS_DEBUG
, "Memory configured in dual-channel asymmetric mode.\n");
1525 mchbar_clrbits32(DCC_MCHBAR
, DCC_INTERLEAVED
);
1527 case CHANNEL_MODE_DUAL_INTERLEAVED
:
1528 printk(BIOS_DEBUG
, "Memory configured in dual-channel interleaved mode.\n");
1529 mchbar_clrbits32(DCC_MCHBAR
, DCC_NO_CHANXOR
| 1 << 9);
1530 mchbar_setbits32(DCC_MCHBAR
, DCC_INTERLEAVED
);
1534 printk(BIOS_SPEW
, "Memory map:\n"
1538 "REMAP:\t base = %5uMB\n"
1539 "\t limit = %5uMB\n"
1540 "usedMEsize: %dMB\n",
1541 TOM
, TOLUD
, TOUUD
, REMAPbase
, REMAPlimit
, usedMEsize
);
1543 static void prejedec_memory_map(const dimminfo_t
*const dimms
, channel_mode_t mode
)
1545 /* Never use dual-interleaved mode in pre-jedec config. */
1546 if (CHANNEL_MODE_DUAL_INTERLEAVED
== mode
)
1547 mode
= CHANNEL_MODE_DUAL_ASYNC
;
1549 program_memory_map(dimms
, mode
, 1, 0);
1550 mchbar_setbits32(DCC_MCHBAR
, DCC_NO_CHANXOR
);
1553 static void ddr3_select_clock_mux(const mem_clock_t ddr3clock
,
1554 const dimminfo_t
*const dimms
,
1555 const stepping_t stepping
)
1557 const int clk1067
= (ddr3clock
== MEM_CLOCK_1067MT
);
1558 const int cardF
[] = { CHANNEL_IS_CARDF(dimms
, 0), CHANNEL_IS_CARDF(dimms
, 1) };
1562 if (stepping
< STEPPING_B1
)
1563 die("Stepping <B1 unsupported in clock-multiplexer selection.\n");
1565 FOR_EACH_POPULATED_CHANNEL(dimms
, ch
) {
1567 if ((1 == ch
) && (!CHANNEL_IS_POPULATED(dimms
, 0) || (cardF
[0] != cardF
[1])))
1569 const unsigned int b
= 0x14b0 + (ch
* 0x0100);
1570 mchbar_write32(b
+ 0x1c, (mchbar_read32(b
+ 0x1c) & ~(7 << 11)) |
1571 ((( cardF
[ch
])?1:0) << 11) | mixed
);
1572 mchbar_write32(b
+ 0x18, (mchbar_read32(b
+ 0x18) & ~(7 << 11)) | mixed
);
1573 mchbar_write32(b
+ 0x14, (mchbar_read32(b
+ 0x14) & ~(7 << 11)) |
1574 (((!clk1067
&& !cardF
[ch
])?0:1) << 11) | mixed
);
1575 mchbar_write32(b
+ 0x10, (mchbar_read32(b
+ 0x10) & ~(7 << 11)) |
1576 ((( clk1067
&& !cardF
[ch
])?1:0) << 11) | mixed
);
1577 mchbar_write32(b
+ 0x0c, (mchbar_read32(b
+ 0x0c) & ~(7 << 11)) |
1578 ((( cardF
[ch
])?3:2) << 11) | mixed
);
1579 mchbar_write32(b
+ 0x08, (mchbar_read32(b
+ 0x08) & ~(7 << 11)) |
1581 mchbar_write32(b
+ 0x04, (mchbar_read32(b
+ 0x04) & ~(7 << 11)) |
1582 (((!clk1067
&& !cardF
[ch
])?2:3) << 11) | mixed
);
1583 mchbar_write32(b
+ 0x00, (mchbar_read32(b
+ 0x00) & ~(7 << 11)) |
1584 ((( clk1067
&& !cardF
[ch
])?3:2) << 11) | mixed
);
1588 static void ddr3_write_io_init(const mem_clock_t ddr3clock
,
1589 const dimminfo_t
*const dimms
,
1590 const stepping_t stepping
,
1593 const int a1step
= stepping
>= STEPPING_CONVERSION_A1
;
1594 const int cardF
[] = { CHANNEL_IS_CARDF(dimms
, 0), CHANNEL_IS_CARDF(dimms
, 1) };
1598 if (stepping
< STEPPING_B1
)
1599 die("Stepping <B1 unsupported in write i/o initialization.\n");
1601 die("SFF platform unsupported in write i/o initialization.\n");
1603 static const u32 ddr3_667_800_by_stepping_ddr3_and_card
[][2][2][4] = {
1604 { /* Stepping B3 and below */
1606 { 0xa3255008, 0x26888209, 0x26288208, 0x6188040f },
1607 { 0x7524240b, 0xa5255608, 0x232b8508, 0x5528040f },
1610 { 0xa6255308, 0x26888209, 0x212b7508, 0x6188040f },
1611 { 0x7524240b, 0xa6255708, 0x132b7508, 0x5528040f },
1614 { /* Conversion stepping A1 and above */
1616 { 0xc5257208, 0x26888209, 0x26288208, 0x6188040f },
1617 { 0x7524240b, 0xc5257608, 0x232b8508, 0x5528040f },
1620 { 0xb6256308, 0x26888209, 0x212b7508, 0x6188040f },
1621 { 0x7524240b, 0xb6256708, 0x132b7508, 0x5528040f },
1625 static const u32 ddr3_1067_by_channel_and_card
[][2][4] = {
1627 { 0xb2254708, 0x002b7408, 0x132b8008, 0x7228060f },
1628 { 0xb0255008, 0xa4254108, 0x4528b409, 0x9428230f },
1631 { 0xa4254208, 0x022b6108, 0x132b8208, 0x9228210f },
1632 { 0x6024140b, 0x92244408, 0x252ba409, 0x9328360c },
1636 FOR_EACH_POPULATED_CHANNEL(dimms
, ch
) {
1637 if ((1 == ch
) && CHANNEL_IS_POPULATED(dimms
, 0) && (cardF
[0] == cardF
[1]))
1638 /* Only write if second channel population differs. */
1640 const u32
*const data
= (ddr3clock
!= MEM_CLOCK_1067MT
)
1641 ? ddr3_667_800_by_stepping_ddr3_and_card
[a1step
][2 - ddr3clock
][cardF
[ch
]]
1642 : ddr3_1067_by_channel_and_card
[ch
][cardF
[ch
]];
1643 mchbar_write32(CxWRTy_MCHBAR(ch
, 0), data
[0]);
1644 mchbar_write32(CxWRTy_MCHBAR(ch
, 1), data
[1]);
1645 mchbar_write32(CxWRTy_MCHBAR(ch
, 2), data
[2]);
1646 mchbar_write32(CxWRTy_MCHBAR(ch
, 3), data
[3]);
1649 mchbar_write32(0x1490, 0x00e70067);
1650 mchbar_write32(0x1494, 0x000d8000);
1651 mchbar_write32(0x1590, 0x00e70067);
1652 mchbar_write32(0x1594, 0x000d8000);
1655 static void ddr_read_io_init(const mem_clock_t ddr_clock
,
1656 const dimminfo_t
*const dimms
,
1661 FOR_EACH_POPULATED_CHANNEL(dimms
, ch
) {
1663 const unsigned int base
= 0x14b0 + (ch
* 0x0100);
1664 for (addr
= base
+ 0x1c; addr
>= base
; addr
-= 4) {
1665 tmp
= mchbar_read32(addr
);
1666 tmp
&= ~((3 << 25) | (1 << 8) | (7 << 16) | (0xf << 20) | (1 << 27));
1668 switch (ddr_clock
) {
1669 case MEM_CLOCK_667MT
:
1670 tmp
|= (1 << 16) | (4 << 20);
1672 case MEM_CLOCK_800MT
:
1673 tmp
|= (2 << 16) | (3 << 20);
1675 case MEM_CLOCK_1067MT
:
1677 tmp
|= (2 << 16) | (1 << 20);
1679 tmp
|= (2 << 16) | (2 << 20);
1684 mchbar_write32(addr
, tmp
);
1689 static void ddr3_memory_io_init(const mem_clock_t ddr3clock
,
1690 const dimminfo_t
*const dimms
,
1691 const stepping_t stepping
,
1696 if (stepping
< STEPPING_B1
)
1697 die("Stepping <B1 unsupported in "
1698 "system-memory i/o initialization.\n");
1700 tmp
= mchbar_read32(0x1400);
1702 tmp
|= (1<<9) | (1<<13);
1703 mchbar_write32(0x1400, tmp
);
1705 tmp
= mchbar_read32(0x140c);
1706 tmp
&= ~(0xff | (1<<11) | (1<<12) |
1707 (1<<16) | (1<<18) | (1<<27) | (0xf<<28));
1708 tmp
|= (1<<7) | (1<<11) | (1<<16);
1709 switch (ddr3clock
) {
1710 case MEM_CLOCK_667MT
:
1713 case MEM_CLOCK_800MT
:
1716 case MEM_CLOCK_1067MT
:
1720 mchbar_write32(0x140c, tmp
);
1722 mchbar_clrbits32(0x1440, 1);
1724 tmp
= mchbar_read32(0x1414);
1725 tmp
&= ~((1<<20) | (7<<11) | (0xf << 24) | (0xf << 16));
1727 switch (ddr3clock
) {
1728 case MEM_CLOCK_667MT
:
1729 tmp
|= (2 << 24) | (10 << 16);
1731 case MEM_CLOCK_800MT
:
1732 tmp
|= (3 << 24) | (7 << 16);
1734 case MEM_CLOCK_1067MT
:
1735 tmp
|= (4 << 24) | (4 << 16);
1738 mchbar_write32(0x1414, tmp
);
1740 mchbar_clrbits32(0x1418, 1 << 3 | 1 << 11 | 1 << 19 | 1 << 27);
1742 mchbar_clrbits32(0x141c, 1 << 3 | 1 << 11 | 1 << 19 | 1 << 27);
1744 mchbar_setbits32(0x1428, 1 << 14);
1746 tmp
= mchbar_read32(0x142c);
1747 tmp
&= ~((0xf << 8) | (0x7 << 20) | 0xf | (0xf << 24));
1748 tmp
|= (0x3 << 20) | (5 << 24);
1749 switch (ddr3clock
) {
1750 case MEM_CLOCK_667MT
:
1751 tmp
|= (2 << 8) | 0xc;
1753 case MEM_CLOCK_800MT
:
1754 tmp
|= (3 << 8) | 0xa;
1756 case MEM_CLOCK_1067MT
:
1757 tmp
|= (4 << 8) | 0x7;
1760 mchbar_write32(0x142c, tmp
);
1762 tmp
= mchbar_read32(0x400);
1763 tmp
&= ~((3 << 4) | (3 << 16) | (3 << 30));
1764 tmp
|= (2 << 4) | (2 << 16);
1765 mchbar_write32(0x400, tmp
);
1767 mchbar_clrbits32(0x404, 0xf << 20);
1769 mchbar_clrbits32(0x40c, 1 << 6);
1771 tmp
= mchbar_read32(0x410);
1774 mchbar_write32(0x410, tmp
);
1776 tmp
= mchbar_read32(0x41c);
1779 mchbar_write32(0x41c, tmp
);
1781 ddr3_select_clock_mux(ddr3clock
, dimms
, stepping
);
1783 ddr3_write_io_init(ddr3clock
, dimms
, stepping
, sff
);
1785 ddr_read_io_init(ddr3clock
, dimms
, sff
);
1788 static void ddr2_select_clock_mux(const dimminfo_t
*const dimms
)
1792 FOR_EACH_POPULATED_CHANNEL(dimms
, ch
) {
1793 const unsigned int b
= 0x14b0 + (ch
* 0x0100);
1794 for (o
= 0; o
< 0x20; o
+= 4)
1795 mchbar_clrbits32(b
+ o
, 7 << 11);
1799 static void ddr2_write_io_init(const dimminfo_t
*const dimms
)
1803 mchbar_clrsetbits32(CxWRTy_MCHBAR(0, 0), 0xf7bff71f, 0x008b0008);
1805 for (s
= 1; s
< 4; ++s
) {
1806 mchbar_clrsetbits32(CxWRTy_MCHBAR(0, s
), 0xf7bff71f, 0x00800000);
1809 mchbar_clrsetbits32(0x1490, 0xf7fff77f, 0x00800000);
1810 mchbar_clrsetbits32(0x1494, 0xf71f8000, 0x00040000);
1812 mchbar_clrsetbits32(CxWRTy_MCHBAR(1, 0), 0xf7bff71f, 0x00890008);
1814 for (s
= 1; s
< 4; ++s
) {
1815 mchbar_clrsetbits32(CxWRTy_MCHBAR(1, s
), 0xf7bff71f, 0x00890000);
1818 mchbar_clrsetbits32(0x1590, 0xf7fff77f, 0x00800000);
1819 mchbar_clrsetbits32(0x1594, 0xf71f8000, 0x00040000);
1822 static void ddr2_memory_io_init(const mem_clock_t ddr2clock
,
1823 const dimminfo_t
*const dimms
,
1824 const stepping_t stepping
,
1830 if (stepping
< STEPPING_B1
)
1831 die("Stepping <B1 unsupported in DDR2 memory i/o initialization.\n");
1833 die("SFF platform unsupported in DDR2 memory i/o initialization.\n");
1835 tmp
= mchbar_read32(0x140c);
1836 tmp
&= ~(0xff | (1<<11) | (0xf<<28));
1837 tmp
|= (1<<0) | (1<<12) | (1<<16) | (1<<18) | (1<<27);
1838 mchbar_write32(0x140c, tmp
);
1840 tmp
= mchbar_read32(0x1440);
1842 tmp
|= (1<<0) | (1<<2) | (1<<3) | (1<<4) | (1<<6);
1843 mchbar_write32(0x1440, tmp
);
1845 tmp
= mchbar_read32(0x1414);
1846 tmp
&= ~((1<<20) | (7<<11) | (0xf << 24) | (0xf << 16));
1848 tmp2
= mchbar_read32(0x142c);
1849 tmp2
&= ~((0xf << 8) | (0x7 << 20) | 0xf);
1850 tmp2
|= (0x3 << 20);
1851 switch (ddr2clock
) {
1852 case MEM_CLOCK_667MT
:
1853 tmp
|= (2 << 24) | (10 << 16);
1854 tmp2
|= (2 << 8) | 0xc;
1856 case MEM_CLOCK_800MT
:
1857 tmp
|= (3 << 24) | (7 << 16);
1858 tmp2
|= (3 << 8) | 0xa;
1863 mchbar_write32(0x1414, tmp
);
1864 mchbar_write32(0x142c, tmp2
);
1866 mchbar_clrbits32(0x1418, (1<<3) | (1<<11) | (1<<19) | (1<<27));
1867 mchbar_clrbits32(0x141c, (1<<3) | (1<<11) | (1<<19) | (1<<27));
1869 tmp
= mchbar_read32(0x400);
1870 tmp
&= ~((3 << 4) | (3 << 16) | (3 << 30));
1871 tmp
|= (2 << 4) | (2 << 16);
1872 mchbar_write32(0x400, tmp
);
1874 mchbar_clrbits32(0x404, 0xf << 20);
1876 mchbar_clrbits32(0x40c, 1 << 6);
1878 tmp
= mchbar_read32(0x410);
1879 tmp
&= ~(0xf << 28);
1881 mchbar_write32(0x410, tmp
);
1883 tmp
= mchbar_read32(0x41c);
1884 tmp
&= ~((7<<0) | (7<<4));
1885 tmp
|= (1<<0) | (1<<3) | (1<<4) | (1<<7);
1886 mchbar_write32(0x41c, tmp
);
1888 ddr2_select_clock_mux(dimms
);
1890 ddr2_write_io_init(dimms
);
1892 ddr_read_io_init(ddr2clock
, dimms
, sff
);
1895 static void jedec_command(const uintptr_t rankaddr
, const u32 cmd
, const u32 val
)
1897 mchbar_clrsetbits32(DCC_MCHBAR
, DCC_SET_EREG_MASK
, cmd
);
1898 read32p(rankaddr
| val
);
1901 static void jedec_init_ddr3(const timings_t
*const timings
,
1902 const dimminfo_t
*const dimms
)
1904 if ((timings
->tWR
< 5) || (timings
->tWR
> 12))
1905 die("tWR value unsupported in Jedec initialization.\n");
1909 /* 5 6 7 8 9 10 11 12 */
1910 static const u8 wr_lut
[] = { 1, 2, 3, 4, 5, 5, 6, 6 };
1912 const int WL
= ((timings
->tWL
- 5) & 7) << 6;
1913 const int ODT_120OHMS
= (1 << 9);
1914 const int ODS_34OHMS
= (1 << 4);
1915 const int WR
= (wr_lut
[timings
->tWR
- 5] & 7) << 12;
1916 const int DLL1
= 1 << 11;
1917 const int CAS
= ((timings
->CAS
- 4) & 7) << 7;
1918 const int INTERLEAVED
= 1 << 6;/* This is READ Burst Type == interleaved. */
1921 FOR_EACH_POPULATED_RANK(dimms
, ch
, r
) {
1922 /* We won't do this in dual-interleaved mode,
1923 so don't care about the offset.
1924 Mirrored ranks aren't taken into account here. */
1925 const uintptr_t rankaddr
= raminit_get_rank_addr(ch
, r
);
1926 printk(BIOS_DEBUG
, "JEDEC init @0x%08x\n", (u32
)rankaddr
);
1928 jedec_command(rankaddr
, DCC_SET_EREGx(2), WL
);
1929 jedec_command(rankaddr
, DCC_SET_EREGx(3), 0);
1930 jedec_command(rankaddr
, DCC_SET_EREGx(1), ODT_120OHMS
| ODS_34OHMS
);
1931 jedec_command(rankaddr
, DCC_SET_MREG
, WR
| DLL1
| CAS
| INTERLEAVED
);
1932 jedec_command(rankaddr
, DCC_SET_MREG
, WR
| CAS
| INTERLEAVED
);
1936 static void jedec_init_ddr2(const timings_t
*const timings
,
1937 const dimminfo_t
*const dimms
)
1939 /* All bit offsets are off by 3 (2^3 bytes bus width). */
1941 /* Mode Register (MR) settings */
1942 const int WR
= ((timings
->tWR
- 1) & 7) << 12;
1943 const int DLLreset
= 1 << 11;
1944 const int CAS
= (timings
->CAS
& 7) << 7;
1945 const int BTinterleaved
= 1 << 6;
1946 const int BL8
= 3 << 3;
1948 /* Extended Mode Register 1 (EMR1) */
1949 const int OCDdefault
= 7 << 10;
1950 const int ODT_150OHMS
= 1 << 9 | 0 << 5;
1953 FOR_EACH_POPULATED_RANK(dimms
, ch
, r
) {
1954 /* We won't do this in dual-interleaved mode,
1955 so don't care about the offset.
1956 Mirrored ranks aren't taken into account here. */
1957 const uintptr_t rankaddr
= raminit_get_rank_addr(ch
, r
);
1958 printk(BIOS_DEBUG
, "JEDEC init @0x%08x\n", (u32
)rankaddr
);
1960 jedec_command(rankaddr
, DCC_CMD_ABP
, 0);
1961 jedec_command(rankaddr
, DCC_SET_EREGx(2), 0);
1962 jedec_command(rankaddr
, DCC_SET_EREGx(3), 0);
1963 jedec_command(rankaddr
, DCC_SET_EREGx(1), ODT_150OHMS
);
1964 jedec_command(rankaddr
, DCC_SET_MREG
, WR
| DLLreset
| CAS
| BTinterleaved
| BL8
);
1965 jedec_command(rankaddr
, DCC_CMD_ABP
, 0);
1966 jedec_command(rankaddr
, DCC_CMD_CBR
, 0);
1970 jedec_command(rankaddr
, DCC_SET_MREG
, WR
| CAS
| BTinterleaved
| BL8
);
1971 jedec_command(rankaddr
, DCC_SET_EREGx(1), OCDdefault
| ODT_150OHMS
);
1972 jedec_command(rankaddr
, DCC_SET_EREGx(1), ODT_150OHMS
);
1976 static void jedec_init(const int spd_type
,
1977 const timings_t
*const timings
,
1978 const dimminfo_t
*const dimms
)
1980 /* Pre-jedec settings */
1981 mchbar_setbits32(0x40, 1 << 1);
1982 mchbar_setbits32(0x230, 3 << 1);
1983 mchbar_setbits32(0x238, 3 << 24);
1984 mchbar_setbits32(0x23c, 3 << 24);
1986 /* Normal write pointer operation */
1987 mchbar_setbits32(0x14f0, 1 << 9);
1988 mchbar_setbits32(0x15f0, 1 << 9);
1990 mchbar_clrsetbits32(DCC_MCHBAR
, DCC_CMD_MASK
, DCC_CMD_NOP
);
1992 pci_and_config8(PCI_DEV(0, 0, 0), 0xf0, ~(1 << 2));
1994 pci_or_config8(PCI_DEV(0, 0, 0), 0xf0, 1 << 2);
1997 if (spd_type
== DDR2
) {
1998 jedec_init_ddr2(timings
, dimms
);
1999 } else if (spd_type
== DDR3
) {
2000 jedec_init_ddr3(timings
, dimms
);
2004 static void ddr3_calibrate_zq(void) {
2007 u32 tmp
= mchbar_read32(DCC_MCHBAR
);
2009 tmp
|= (5 << 16); /* ZQ calibration mode */
2010 mchbar_write32(DCC_MCHBAR
, tmp
);
2012 mchbar_setbits32(CxDRT6_MCHBAR(0), 1 << 3);
2013 mchbar_setbits32(CxDRT6_MCHBAR(1), 1 << 3);
2017 mchbar_clrbits32(CxDRT6_MCHBAR(0), 1 << 3);
2018 mchbar_clrbits32(CxDRT6_MCHBAR(1), 1 << 3);
2020 mchbar_setbits32(DCC_MCHBAR
, 7 << 16); /* Normal operation */
/* Post-JEDEC MCHBAR programming; one extra write for quad-core CPUs. */
static void post_jedec_sequence(const int cores) {
	const int quadcore = cores == 4;

	mchbar_clrbits32(0x0040, 1 << 1);
	mchbar_clrbits32(0x0230, 3 << 1);
	mchbar_setbits32(0x0230, 1 << 15);
	mchbar_clrbits32(0x0230, 1 << 19);
	mchbar_write32(0x1250, 0x6c4);
	mchbar_write32(0x1350, 0x6c4);
	mchbar_write32(0x1254, 0x871a066d);
	mchbar_write32(0x1354, 0x871a066d);
	mchbar_setbits32(0x0238, 1 << 26);
	mchbar_clrbits32(0x0238, 3 << 24);
	mchbar_setbits32(0x0238, 1 << 23);
	mchbar_clrsetbits32(0x0238, 7 << 20, 3 << 20);
	mchbar_clrsetbits32(0x0238, 7 << 17, 6 << 17);
	mchbar_clrsetbits32(0x0238, 7 << 14, 6 << 14);
	mchbar_clrsetbits32(0x0238, 7 << 11, 6 << 11);
	mchbar_clrsetbits32(0x0238, 7 << 8, 6 << 8);
	mchbar_clrbits32(0x023c, 3 << 24);
	mchbar_clrbits32(0x023c, 1 << 23);
	mchbar_clrsetbits32(0x023c, 7 << 20, 3 << 20);
	mchbar_clrsetbits32(0x023c, 7 << 17, 6 << 17);
	mchbar_clrsetbits32(0x023c, 7 << 14, 6 << 14);
	mchbar_clrsetbits32(0x023c, 7 << 11, 6 << 11);
	mchbar_clrsetbits32(0x023c, 7 << 8, 6 << 8);

	if (quadcore)
		mchbar_setbits32(0xb14, 0xbfbf << 16);
}
2055 static void dram_optimizations(const timings_t
*const timings
,
2056 const dimminfo_t
*const dimms
)
2060 FOR_EACH_POPULATED_CHANNEL(dimms
, ch
) {
2061 const unsigned int mchbar
= CxDRC1_MCHBAR(ch
);
2062 u32 cxdrc1
= mchbar_read32(mchbar
);
2063 cxdrc1
&= ~CxDRC1_SSDS_MASK
;
2064 if (dimms
[ch
].ranks
== 1)
2065 cxdrc1
|= CxDRC1_SS
;
2067 cxdrc1
|= CxDRC1_DS
;
2068 mchbar_write32(mchbar
, cxdrc1
);
2072 u32
raminit_get_rank_addr(unsigned int channel
, unsigned int rank
)
2074 if (!channel
&& !rank
)
2075 return 0; /* Address of first rank */
2077 /* Read the bound of the previous rank. */
2081 rank
= 3; /* Highest rank per channel */
2084 const u32 reg
= mchbar_read32(CxDRBy_MCHBAR(channel
, rank
));
2085 /* Bound is in 32MB. */
2086 return ((reg
& CxDRBy_BOUND_MASK(rank
)) >> CxDRBy_BOUND_SHIFT(rank
)) << 25;
/* Reset the DRAM read/write pointers of both channels. */
void raminit_reset_readwrite_pointers(void)
{
	mchbar_setbits32(0x1234, 1 << 6);
	mchbar_clrbits32(0x1234, 1 << 6);
	mchbar_setbits32(0x1334, 1 << 6);
	mchbar_clrbits32(0x1334, 1 << 6);
	mchbar_clrbits32(0x14f0, 1 << 9);
	mchbar_setbits32(0x14f0, 1 << 9);
	mchbar_setbits32(0x14f0, 1 << 10);
	mchbar_clrbits32(0x15f0, 1 << 9);
	mchbar_setbits32(0x15f0, 1 << 9);
	mchbar_setbits32(0x15f0, 1 << 10);
}
2103 void raminit(sysinfo_t
*const sysinfo
, const int s3resume
)
2105 const dimminfo_t
*const dimms
= sysinfo
->dimms
;
2106 const timings_t
*const timings
= &sysinfo
->selected_timings
;
2110 timestamp_add_now(TS_INITRAM_START
);
2112 /* Wait for some bit, maybe TXT clear. */
2113 if (sysinfo
->txt_enabled
) {
2114 while (!(read8((u8
*)0xfed40000) & (1 << 7))) {}
2117 /* Collect information about DIMMs and find common settings. */
2118 collect_dimm_config(sysinfo
);
2120 /* Check for bad warm boot. */
2121 reset_on_bad_warmboot();
2123 /***** From now on, program according to collected infos: *****/
2125 /* Program DRAM type. */
2126 switch (sysinfo
->spd_type
) {
2128 mchbar_setbits8(0x1434, 1 << 7);
2131 mchbar_setbits8(0x1434, 3 << 0);
2135 /* Program system memory frequency. */
2136 set_system_memory_frequency(timings
);
2137 /* Program IGD memory frequency. */
2138 set_igd_memory_frequencies(sysinfo
);
2140 /* Configure DRAM control mode for populated channels. */
2141 configure_dram_control_mode(timings
, dimms
);
2143 /* Initialize RCOMP. */
2144 rcomp_initialization(sysinfo
->spd_type
, sysinfo
->stepping
, sysinfo
->sff
);
2146 /* Power-up DRAM. */
2147 dram_powerup(sysinfo
->spd_type
, sysinfo
->stepping
, s3resume
);
2148 /* Program DRAM timings. */
2149 dram_program_timings(sysinfo
->spd_type
, timings
);
2150 /* Program number of banks. */
2151 dram_program_banks(dimms
);
2152 /* Enable DRAM clock pairs for populated DIMMs. */
2153 FOR_EACH_POPULATED_CHANNEL(dimms
, ch
)
2154 mchbar_setbits32(CxDCLKDIS_MCHBAR(ch
), CxDCLKDIS_ENABLE
);
2156 /* Enable On-Die Termination. */
2157 if (sysinfo
->spd_type
== DDR2
)
2158 ddr2_odt_setup(timings
, sysinfo
->sff
);
2160 ddr3_odt_setup(timings
, sysinfo
->sff
);
2161 /* Miscellaneous settings. */
2162 misc_settings(timings
, sysinfo
->stepping
);
2163 /* Program clock crossing registers. */
2164 clock_crossing_setup(timings
->fsb_clock
, timings
->mem_clock
, dimms
);
2165 /* Program egress VC1 timings. */
2166 vc1_program_timings(timings
->fsb_clock
);
2167 /* Perform system-memory i/o initialization. */
2168 if (sysinfo
->spd_type
== DDR2
) {
2169 ddr2_memory_io_init(timings
->mem_clock
, dimms
,
2170 sysinfo
->stepping
, sysinfo
->sff
);
2172 ddr3_memory_io_init(timings
->mem_clock
, dimms
,
2173 sysinfo
->stepping
, sysinfo
->sff
);
2176 /* Initialize memory map with dummy values of 128MB per rank with a
2177 page size of 4KB. This makes the JEDEC initialization code easier. */
2178 prejedec_memory_map(dimms
, timings
->channel_mode
);
2180 /* Perform JEDEC initialization of DIMMS. */
2181 jedec_init(sysinfo
->spd_type
, timings
, dimms
);
2182 /* Some programming steps after JEDEC initialization. */
2183 post_jedec_sequence(sysinfo
->cores
);
2185 /* Announce normal operation, initialization completed. */
2186 mchbar_setbits32(DCC_MCHBAR
, 0x7 << 16 | 0x1 << 19);
2188 pci_or_config8(PCI_DEV(0, 0, 0), 0xf0, 1 << 2);
2190 pci_and_config8(PCI_DEV(0, 0, 0), 0xf0, ~(1 << 2));
2192 /* Take a breath (the reader). */
2194 /* Perform ZQ calibration for DDR3. */
2195 if (sysinfo
->spd_type
== DDR3
)
2196 ddr3_calibrate_zq();
2198 /* Perform receive-enable calibration. */
2199 raminit_receive_enable_calibration(sysinfo
->spd_type
, timings
, dimms
);
2200 /* Lend clock values from receive-enable calibration. */
2201 mchbar_clrsetbits32(CxDRT5_MCHBAR(0), 0xf0,
2202 (((mchbar_read32(CxDRT3_MCHBAR(0)) >> 7) - 1) & 0xf) << 4);
2203 mchbar_clrsetbits32(CxDRT5_MCHBAR(1), 0xf0,
2204 (((mchbar_read32(CxDRT3_MCHBAR(1)) >> 7) - 1) & 0xf) << 4);
2206 /* Perform read/write training for high clock rate. */
2207 if (timings
->mem_clock
== MEM_CLOCK_1067MT
) {
2208 raminit_read_training(dimms
, s3resume
);
2209 raminit_write_training(timings
->mem_clock
, dimms
, s3resume
);
2212 igd_compute_ggc(sysinfo
);
2214 /* Program final memory map (with real values). */
2215 program_memory_map(dimms
, timings
->channel_mode
, 0, sysinfo
->ggc
);
2217 /* Some last optimizations. */
2218 dram_optimizations(timings
, dimms
);
2220 /* Mark raminit being finished. :-) */
2221 pci_and_config8(PCI_DEV(0, 0x1f, 0), 0xa2, (u8
)~(1 << 7));
2223 raminit_thermal(sysinfo
);
2226 timestamp_add_now(TS_INITRAM_END
);