/* SPDX-License-Identifier: GPL-2.0-only */

#ifndef __NORTHBRIDGE_INTEL_GM45_GM45_H__
#define __NORTHBRIDGE_INTEL_GM45_GM45_H__
14 typedef enum { /* Steppings below B1 were pre-production,
15 conversion stepping A1 is a newer GL40 with support for 800 MT/s on FSB/DDR.
16 We'll support B1, B2, B3, and conversion stepping A1. */
25 STEPPING_CONVERSION_A1
= 9,
58 CHANNEL_MODE_DUAL_ASYNC
,
59 CHANNEL_MODE_DUAL_INTERLEAVED
,
62 typedef enum { /* as in DDR3 spd */
69 typedef enum { /* as in DDR3 spd */
79 typedef enum { /* as in DDR2 spd */
90 fsb_clock_t fsb_clock
;
91 mem_clock_t mem_clock
;
92 channel_mode_t channel_mode
;
105 unsigned int card_type
; /* 0x0: unpopulated,
106 0xa - 0xf: raw card type A - F */
107 chip_width_t chip_width
;
108 chip_capacity_t chip_capacity
;
109 refresh_rate_t refresh
;
110 unsigned int page_size
; /* of whole DIMM in Bytes (4096 or 8192) */
113 unsigned int rank_capacity_mb
; /* per rank in Megabytes */
	/* The setup is one DIMM per channel, so there's no need to find a
	   common timing setup between multiple chips (but chip and controller
	   still need to be coordinated) */
133 /* to be filled in romstage main: */
135 timings_t selected_timings
;
138 int gs45_low_power_mode
; /* low power mode of GMCH_GS45 */
139 int sff
; /* small form factor option (soldered down DIMM) */
/* Iteration helpers over memory channels and the ranks within them.
   `dimms` is an array of dimminfo_t indexed by channel; a channel is
   populated iff its raw card type is non-zero. */
#define TOTAL_CHANNELS 2
#define CHANNEL_IS_POPULATED(dimms, idx) (dimms[idx].card_type != 0)
#define CHANNEL_IS_CARDF(dimms, idx) (dimms[idx].card_type == 0xf)
#define IF_CHANNEL_POPULATED(dimms, idx) if (dimms[idx].card_type != 0)
#define FOR_EACH_CHANNEL(idx) \
	for (idx = 0; idx < TOTAL_CHANNELS; ++idx)
#define FOR_EACH_POPULATED_CHANNEL(dimms, idx) \
	FOR_EACH_CHANNEL(idx) IF_CHANNEL_POPULATED(dimms, idx)

#define RANKS_PER_CHANNEL 4 /* Only two may be populated */
#define IF_RANK_POPULATED(dimms, ch, r) \
	if (dimms[ch].card_type && ((r) < dimms[ch].ranks))
#define FOR_EACH_RANK_IN_CHANNEL(r) \
	for (r = 0; r < RANKS_PER_CHANNEL; ++r)
#define FOR_EACH_POPULATED_RANK_IN_CHANNEL(dimms, ch, r) \
	FOR_EACH_RANK_IN_CHANNEL(r) IF_RANK_POPULATED(dimms, ch, r)
#define FOR_EACH_RANK(ch, r) \
	FOR_EACH_CHANNEL(ch) FOR_EACH_RANK_IN_CHANNEL(r)
#define FOR_EACH_POPULATED_RANK(dimms, ch, r) \
	FOR_EACH_RANK(ch, r) IF_RANK_POPULATED(dimms, ch, r)

#define DDR3_MAX_CAS 18

/* Offsets of read/write training results in CMOS.
   They will be restored upon S3 resumes. */
#define CMOS_READ_TRAINING 0x80 /* 16 bytes */
#define CMOS_WRITE_TRAINING 0x90 /* 16 bytes (could be reduced to 10 bytes) */
/* PCI configuration space offsets of D0:F0 (host bridge / DRAM controller). */
#define D0F0_EPBAR_LO 0x40
#define D0F0_EPBAR_HI 0x44
#define D0F0_MCHBAR_LO 0x48
#define D0F0_MCHBAR_HI 0x4c
#define D0F0_GGC 0x52
#define D0F0_DEVEN 0x54
#define D0F0_PCIEXBAR_LO 0x60
#define D0F0_PCIEXBAR_HI 0x64
#define D0F0_DMIBAR_LO 0x68
#define D0F0_DMIBAR_HI 0x6c
#define D0F0_PMBASE 0x78
#define D0F0_PAM(x) (0x90 + (x)) /* 0-6 */
#define D0F0_REMAPBASE 0x98
#define D0F0_REMAPLIMIT 0x9a
#define D0F0_SMRAM 0x9d
#define D0F0_ESMRAMC 0x9e
#define D0F0_TOM 0xa0
#define D0F0_TOUUD 0xa2
#define D0F0_TOLUD 0xb0
#define D0F0_SKPD 0xdc /* Scratchpad Data */
#define D0F0_CAPID0 0xe0

/* PCI configuration space offsets of D1:F0 (PEG port). */
#define D1F0_VCCAP 0x104
#define D1F0_VC0RCTL 0x114

/*
 * Graphics frequencies
 */
#define GCFGC_PCIDEV PCI_DEV(0, 2, 0)
#define GCFGC_OFFSET 0xf0
#define GCFGC_CR_SHIFT 0
#define GCFGC_CR_MASK (0xf << GCFGC_CR_SHIFT)
#define GCFGC_CS_SHIFT 8
#define GCFGC_CS_MASK (0xf << GCFGC_CS_SHIFT)
#define GCFGC_CD_SHIFT 12
#define GCFGC_CD_MASK (0x1 << GCFGC_CD_SHIFT)
#define GCFGC_UPDATE_SHIFT 5
#define GCFGC_UPDATE (0x1 << GCFGC_UPDATE_SHIFT)
231 #include <northbridge/intel/common/fixed_bars.h>
/* MCHBAR-relative registers: clock configuration and DRAM channel control. */
#define HPLLVCO_MCHBAR 0x0c0f

#define PMSTS_MCHBAR 0x0f14 /* Self refresh channel status */
#define PMSTS_WARM_RESET (1 << 1)
#define PMSTS_BOTH_SELFREFRESH (1 << 0)

#define CLKCFG_MCHBAR 0x0c00
#define CLKCFG_FSBCLK_SHIFT 0
#define CLKCFG_FSBCLK_MASK (7 << CLKCFG_FSBCLK_SHIFT)
#define CLKCFG_MEMCLK_SHIFT 4
#define CLKCFG_MEMCLK_MASK (7 << CLKCFG_MEMCLK_SHIFT)
#define CLKCFG_UPDATE (1 << 12)

#define SSKPD_MCHBAR 0x0c1c
#define SSKPD_CLK_SHIFT 0
#define SSKPD_CLK_MASK (7 << SSKPD_CLK_SHIFT)

#define DCC_MCHBAR 0x200
#define DCC_NO_CHANXOR (1 << 10)
#define DCC_INTERLEAVED (1 << 1)
#define DCC_CMD_SHIFT 16
#define DCC_CMD_MASK (7 << DCC_CMD_SHIFT)
#define DCC_CMD_NOP (1 << DCC_CMD_SHIFT)
#define DCC_CMD_ABP (2 << DCC_CMD_SHIFT)
/* For mode register mr0: */
#define DCC_SET_MREG (3 << DCC_CMD_SHIFT)
/* For extended mode registers mr1 to mr3: */
#define DCC_SET_EREG (4 << DCC_CMD_SHIFT)
#define DCC_SET_EREG_SHIFT 21
#define DCC_SET_EREG_MASK (DCC_CMD_MASK | (3 << DCC_SET_EREG_SHIFT))
/* Encode "set extended mode register x" (x in 1..3) into the DCC command
   field; masked so stray bits cannot escape the command/register fields.
   NOTE(review): the final `& DCC_SET_EREG_MASK` line was missing from the
   mangled source and is reconstructed from the dangling `& \` continuation
   and the mask defined directly above — confirm against upstream. */
#define DCC_SET_EREGx(x) ((DCC_SET_EREG | \
			   (((x) - 1) << DCC_SET_EREG_SHIFT)) & \
			  DCC_SET_EREG_MASK)
#define DCC_CMD_CBR (6 << DCC_CMD_SHIFT)
/* Per channel DRAM Row Attribute registers (32-bit),
   one register per channel x, four bit fields: one per rank r. */
#define CxDRA_MCHBAR(x) (0x1208 + ((x) * 0x0100))
#define CxDRA_PAGESIZE_SHIFT(r) ((r) * 4) /* Per rank r */
#define CxDRA_PAGESIZE_MASKr(r) (0x7 << CxDRA_PAGESIZE_SHIFT(r))
#define CxDRA_PAGESIZE_MASK 0x0000ffff
#define CxDRA_PAGESIZE(r, p) /* for log2(dimm page size in bytes) p */ \
	((((p) - 10) << CxDRA_PAGESIZE_SHIFT(r)) & CxDRA_PAGESIZE_MASKr(r))
#define CxDRA_BANKS_SHIFT(r) (((r) * 3) + 16)
#define CxDRA_BANKS_MASKr(r) (0x3 << CxDRA_BANKS_SHIFT(r))
#define CxDRA_BANKS_MASK 0x07ff0000
#define CxDRA_BANKS(r, b) /* for number of banks b */ \
	(((b) << (CxDRA_BANKS_SHIFT(r) - 3)) & CxDRA_BANKS_MASKr(r))

/*
 * Per channel DRAM Row Boundary registers (32-bit)
 * Every two ranks share one register and must be programmed at the same time.
 * All registers (4 ranks per channel) have to be set.
 */
#define CxDRBy_MCHBAR(x, r) (0x1200 + ((x) * 0x0100) + (((r) / 2) * 4))
#define CxDRBy_BOUND_SHIFT(r) (((r) % 2) * 16)
#define CxDRBy_BOUND_MASK(r) (0x1fc << CxDRBy_BOUND_SHIFT(r))
#define CxDRBy_BOUND_MB(r, b) /* for boundary in MB b */ \
	((((b) >> 5) << CxDRBy_BOUND_SHIFT(r)) & CxDRBy_BOUND_MASK(r))

/* Per channel DRAM control registers 0-2. */
#define CxDRC0_MCHBAR(x) (0x1230 + ((x) * 0x0100))
#define CxDRC0_RANKEN0 (1 << 24) /* Rank Enable */
#define CxDRC0_RANKEN1 (1 << 25)
#define CxDRC0_RANKEN2 (1 << 26)
#define CxDRC0_RANKEN3 (1 << 27)
#define CxDRC0_RANKEN(r) (1 << (24 + (r)))
#define CxDRC0_RANKEN_MASK (0xf << 24)
#define CxDRC0_RMS_SHIFT 8 /* Refresh Mode Select */
#define CxDRC0_RMS_MASK (7 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_78US (2 << CxDRC0_RMS_SHIFT)
#define CxDRC0_RMS_39US (3 << CxDRC0_RMS_SHIFT)

#define CxDRC1_MCHBAR(x) (0x1234 + ((x) * 0x0100))
#define CxDRC1_SSDS_SHIFT 24
#define CxDRC1_SSDS_MASK (0xff << CxDRC1_SSDS_SHIFT)
#define CxDRC1_DS (0x91 << CxDRC1_SSDS_SHIFT)
#define CxDRC1_SS (0xb1 << CxDRC1_SSDS_SHIFT)
#define CxDRC1_NOTPOP(r) (1 << (16 + (r))) /* Write 1 for Not Populated */
#define CxDRC1_NOTPOP_MASK (0xf << 16)
#define CxDRC1_MUSTWR (3 << 11)

#define CxDRC2_MCHBAR(x) (0x1238 + ((x) * 0x0100))
#define CxDRC2_NOTPOP(r) (1 << (24 + (r))) /* Write 1 for Not Populated */
#define CxDRC2_NOTPOP_MASK (0xf << 24)
#define CxDRC2_MUSTWR (1 << 12)
#define CxDRC2_CLK1067MT (1 << 0)

/* DRAM Timing registers (32-bit each) */
#define CxDRT0_MCHBAR(x) (0x1210 + ((x) * 0x0100))
#define CxDRT0_BtB_WtP_SHIFT 26
#define CxDRT0_BtB_WtP_MASK (0x1f << CxDRT0_BtB_WtP_SHIFT)
#define CxDRT0_BtB_WtR_SHIFT 20
#define CxDRT0_BtB_WtR_MASK (0x1f << CxDRT0_BtB_WtR_SHIFT)
#define CxDRT1_MCHBAR(x) (0x1214 + ((x) * 0x0100))
#define CxDRT2_MCHBAR(x) (0x1218 + ((x) * 0x0100))
#define CxDRT3_MCHBAR(x) (0x121c + ((x) * 0x0100))
#define CxDRT4_MCHBAR(x) (0x1220 + ((x) * 0x0100))
#define CxDRT5_MCHBAR(x) (0x1224 + ((x) * 0x0100))
#define CxDRT6_MCHBAR(x) (0x1228 + ((x) * 0x0100))

/* Clock disable registers (32-bit each) */
#define CxDCLKDIS_MCHBAR(x) (0x120c + ((x) * 0x0100))
#define CxDCLKDIS_MASK 3
#define CxDCLKDIS_ENABLE 3 /* Always enable both clock pairs. */

/* On-Die-Termination registers (2x 32-bit per channel) */
#define CxODT_HIGH(x) (0x124c + ((x) * 0x0100))
#define CxODT_LOW(x) (0x1248 + ((x) * 0x0100))

/* Write Training registers. */
#define CxWRTy_MCHBAR(ch, s) (0x1470 + ((ch) * 0x0100) + ((3 - (s)) * 4))

#define CxGTEW(x) (0x1270 + ((x) * 0x100))
#define CxGTC(x) (0x1274 + ((x) * 0x100))
#define CxDTPEW(x) (0x1278 + ((x) * 0x100))
#define CxDTAEW(x) (0x1280 + ((x) * 0x100))
#define CxDTC(x) (0x1288 + ((x) * 0x100))
/* DMIBAR-relative registers (DMI link / virtual channel configuration). */
#define DMIVCECH 0x000 /* 32bit */
#define DMIPVCCAP1 0x004 /* 32bit */

#define DMIVC0RCAP 0x010 /* 32bit */
#define DMIVC0RCTL 0x014 /* 32bit */
#define DMIVC0RSTS 0x01a /* 16bit */
#define VC0NP (1 << 1)

#define DMIVC1RCAP 0x01c /* 32bit */
#define DMIVC1RCTL 0x020 /* 32bit */
#define DMIVC1RSTS 0x026 /* 16bit */
#define VC1NP (1 << 1)

#define DMIESD 0x044 /* 32bit */

#define DMILE1D 0x050 /* 32bit */
#define DMILE1A 0x058 /* 64bit */
#define DMILE2D 0x060 /* 32bit */
#define DMILE2A 0x068 /* 64bit */

#define DMILCAP 0x084 /* 32bit */
#define DMILCTL 0x088 /* 16bit */
#define DMILSTS 0x08a /* 16bit */

/* EPBAR-relative registers (egress port virtual channel configuration). */
#define EPPVCCAP1 0x004 /* 32bit */
#define EPPVCCTL 0x00c /* 32bit */

#define EPVC0RCAP 0x010 /* 32bit */
#define EPVC0RCTL 0x014 /* 32bit */
#define EPVC0RSTS 0x01a /* 16bit */

#define EPVC1RCAP 0x01c /* 32bit */
#define EPVC1RCTL 0x020 /* 32bit */
#define EPVC1RSTS 0x026 /* 16bit */

#define EPVC1MTS 0x028 /* 32bit */
#define EPVC1ITC 0x02c /* 32bit */

#define EPVC1IST 0x038 /* 64bit */

#define EPESD 0x044 /* 32bit */

#define EPLE1D 0x050 /* 32bit */
#define EPLE1A 0x058 /* 64bit */
#define EPLE2D 0x060 /* 32bit */
#define EPLE2A 0x068 /* 64bit */

#define EP_PORTARB(x) (0x100 + 4 * (x)) /* 256bit */
408 void gm45_early_init(void);
409 void gm45_early_reset(void);
411 void enter_raminit_or_reset(void);
412 void get_gmch_info(sysinfo_t
*);
413 void raminit(sysinfo_t
*, int s3resume
);
414 void raminit_thermal(const sysinfo_t
*);
415 void setup_sdram_meminfo(const sysinfo_t
*);
416 void init_igd(const sysinfo_t
*const);
417 void init_pm(const sysinfo_t
*, int do_freq_scaling_cfg
);
418 void igd_compute_ggc(sysinfo_t
*const sysinfo
);
420 int raminit_read_vco_index(void);
421 u32
raminit_get_rank_addr(unsigned int channel
, unsigned int rank
);
423 void raminit_rcomp_calibration(stepping_t stepping
);
424 void raminit_reset_readwrite_pointers(void);
425 void raminit_receive_enable_calibration(int ddr_type
, const timings_t
*, const dimminfo_t
*);
426 void raminit_write_training(const mem_clock_t
, const dimminfo_t
*, int s3resume
);
427 void raminit_read_training(const dimminfo_t
*, int s3resume
);
429 void gm45_late_init(stepping_t
);
431 u32
decode_igd_memory_size(u32 gms
);
432 u32
decode_igd_gtt_size(u32 gsm
);
433 u32
decode_tseg_size(u8 esmramc
);
435 void init_iommu(void);
437 /* romstage mainboard hookups */
438 void mb_setup_superio(void); /* optional */
439 void get_mb_spd_addrmap(u8 spd_addrmap
[4]);
440 void mb_pre_raminit_setup(sysinfo_t
*); /* optional */
441 void mb_post_raminit_setup(void); /* optional */
444 char ascii_string
[13];
445 int pwm_freq
; /* In Hz */
447 int get_blc_values(const struct blc_pwm_t
**entries
);
448 u16
get_blc_pwm_freq_value(void);
450 #include <device/device.h>
454 unsigned long northbridge_write_acpi_tables(const struct device
*device
, unsigned long start
,
455 struct acpi_rsdp
*rsdp
);
457 #endif /* __NORTHBRIDGE_INTEL_GM45_GM45_H__ */