/* SPDX-License-Identifier: Apache-2.0 */
/* SPDX-License-Identifier: GPL-2.0-or-later */
/* See the file LICENSE for further information */

#ifndef _SIFIVE_SDRAM_H
#define _SIFIVE_SDRAM_H

#include <console/console.h>
#include <device/mmio.h>
#include <soc/addressmap.h>
#include <soc/sdram.h>
#define DRAM_CLASS_OFFSET 8
#define DRAM_CLASS_DDR4 0xA
#define OPTIMAL_RMODW_EN_OFFSET 0
#define DISABLE_RD_INTERLEAVE_OFFSET 16
#define OUT_OF_RANGE_FLAG (1 << 1)
#define MULTIPLE_OUT_OF_RANGE_FLAG (1 << 2)
#define PORT_COMMAND_CHANNEL_ERROR_FLAG (1 << 7)
#define MC_INIT_COMPLETE_FLAG (1 << 8) // Memory Controller init complete
#define LEVELING_OPERATION_COMPLETED_FLAG (1 << 22)
#define DFI_PHY_WRLELV_MODE_OFFSET 24
#define DFI_PHY_RDLVL_MODE_OFFSET 24
#define DFI_PHY_RDLVL_GATE_MODE_OFFSET 0
#define VREF_EN_OFFSET 24
#define PORT_ADDR_PROTECTION_EN_OFFSET 0
#define AXI0_ADDRESS_RANGE_ENABLE_OFFSET 8
#define AXI0_RANGE_PROT_BITS_0_OFFSET 24
#define RDLVL_EN_OFFSET 16
#define RDLVL_GATE_EN_OFFSET 24
#define WRLVL_EN_OFFSET 0

#define PHY_RX_CAL_DQ0_0_OFFSET 0
#define PHY_RX_CAL_DQ1_0_OFFSET 16
// reference: FU740-C000 manual chapter 32: DDR Subsystem
// Chapter 32.2: Memory Map
#define FU740_DDRCTRL 0x100b0000
#define FU740_DDRPHY 0x100b2000
#define FU740_PHYSICAL_FILTER 0x100b8000 // formerly called DDRBUSBLOCKER (FU540)
#define FU740_DDRMGMT 0x100c0000
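
/*
 * Note (editorial): the controller and PHY register blocks are accessed below
 * as arrays of 32-bit words, so array index N corresponds to byte offset
 * N * 4 from the block base, i.e. the Denali register DENALI_CTL_N or
 * DENALI_PHY_N. For example, index 132 is byte offset 0x210 (see the
 * INT_STATUS comment in ux00ddr_start()).
 */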
static void phy_reset(u32 *ddrphyreg, const u32 *physettings)
{
	for (int i = 1152; i <= 1214; i++)
		write32(&ddrphyreg[i], physettings[i]);

	for (int i = 0; i <= 1151; i++)
		write32(&ddrphyreg[i], physettings[i]);
}
static void ux00ddr_writeregmap(u32 *ahbregaddr, const u32 *ctlsettings, const u32 *physettings)
{
	u32 *ddrctlreg = (u32 *)ahbregaddr;
	u32 *ddrphyreg = ((u32 *)ahbregaddr) + (0x2000 / sizeof(u32)); //TODO use FU740_DDRPHY instead

	for (int i = 0; i <= 264; i++)
		write32((void *)&ddrctlreg[i], ctlsettings[i]);

	phy_reset(ddrphyreg, physettings);
}
static void ux00ddr_start(u32 *ahbregaddr, u64 *filteraddr, uint64_t ddrend)
{
	// start calibration and training operation
	setbits32(ahbregaddr, 0x1);

	// wait for memory initialization complete
	// bit 8 of INT_STATUS (DENALI_CTL_132) 0x210
	while (!(read32(&ahbregaddr[132]) & MC_INIT_COMPLETE_FLAG))
		;

	// Disable the BusBlocker in front of the controller AXI slave ports
	write64(filteraddr, 0x0f00000000000000UL | (ddrend >> 2));
}
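
/*
 * Note (editorial): the value written to the physical filter above appears to
 * use a PMP-style layout: the low bits hold the end of the DRAM region
 * shifted right by two, while the 0x0f in the top byte looks like a TOR entry
 * with read/write/execute permission, which effectively opens the region.
 * sdram_size() below reverses this encoding: it masks out the address field,
 * shifts it left by two and subtracts the DRAM base. The exact bit meanings
 * are documented in the FU740-C000 manual.
 */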
static void ux00ddr_mask_mc_init_complete_interrupt(u32 *ahbregaddr)
{
	// Mask off Bit 8 of Interrupt Status
	// Bit [8] The MC initialization has been completed
	setbits32(&ahbregaddr[136], MC_INIT_COMPLETE_FLAG);
}
static void ux00ddr_mask_outofrange_interrupts(u32 *ahbregaddr)
{
	// Mask off Bit 2 and Bit 1 of Interrupt Status
	// Bit [2] Multiple accesses outside the defined PHYSICAL memory space have occurred
	// Bit [1] A memory access outside the defined PHYSICAL memory space has occurred
	setbits32(&ahbregaddr[136], OUT_OF_RANGE_FLAG | MULTIPLE_OUT_OF_RANGE_FLAG);
}
static void ux00ddr_mask_port_command_error_interrupt(u32 *ahbregaddr)
{
	// Mask off Bit 7 of Interrupt Status
	// Bit [7] An error occurred on the port command channel
	setbits32(&ahbregaddr[136], PORT_COMMAND_CHANNEL_ERROR_FLAG);
}
static void ux00ddr_mask_leveling_completed_interrupt(u32 *ahbregaddr)
{
	// Mask off Bit 22 of Interrupt Status
	// Bit [22] The leveling operation has completed
	setbits32(&ahbregaddr[136], LEVELING_OPERATION_COMPLETED_FLAG);
}
static void ux00ddr_setuprangeprotection(u32 *ahbregaddr, size_t size)
{
	write32(&ahbregaddr[209], 0x0);
	u32 size_16Kblocks = ((size >> 14) & 0x7FFFFF) - 1;
	write32(&ahbregaddr[210], size_16Kblocks);
	write32(&ahbregaddr[212], 0x0);
	write32(&ahbregaddr[214], 0x0);
	write32(&ahbregaddr[216], 0x0);
	setbits32(&ahbregaddr[224], (0x3 << AXI0_RANGE_PROT_BITS_0_OFFSET));
	write32(&ahbregaddr[225], 0xFFFFFFFF);
	setbits32(&ahbregaddr[208], (1 << AXI0_ADDRESS_RANGE_ENABLE_OFFSET));
	setbits32(&ahbregaddr[208], (1 << PORT_ADDR_PROTECTION_EN_OFFSET));
}
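
/*
 * Note (editorial): the register written above at index 210 takes the
 * protected range size in 16 KiB blocks, minus one. For example, with 16 GiB
 * of DRAM: (0x400000000 >> 14) - 1 = 0x100000 - 1 = 0xFFFFF.
 */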
static void ux00ddr_disableaxireadinterleave(u32 *ahbregaddr)
{
	setbits32(&ahbregaddr[120], (1 << DISABLE_RD_INTERLEAVE_OFFSET));
}
static void ux00ddr_disableoptimalrmodw(u32 *ahbregaddr)
{
	clrbits32(&ahbregaddr[21], (1 << OPTIMAL_RMODW_EN_OFFSET));
}
static void ux00ddr_enablewriteleveling(u32 *ahbregaddr)
{
	setbits32(&ahbregaddr[170], (1 << WRLVL_EN_OFFSET) | (1 << DFI_PHY_WRLELV_MODE_OFFSET));
}
static void ux00ddr_enablereadleveling(u32 *ahbregaddr)
{
	setbits32(&ahbregaddr[181], (1 << DFI_PHY_RDLVL_MODE_OFFSET));
	setbits32(&ahbregaddr[260], (1 << RDLVL_EN_OFFSET));
}
static void ux00ddr_enablereadlevelinggate(u32 *ahbregaddr)
{
	setbits32(&ahbregaddr[260], (1 << RDLVL_GATE_EN_OFFSET));
	setbits32(&ahbregaddr[182], (1 << DFI_PHY_RDLVL_GATE_MODE_OFFSET));
}
static void ux00ddr_enablevreftraining(u32 *ahbregaddr)
{
	setbits32(&ahbregaddr[184], (1 << VREF_EN_OFFSET));
}
static u32 ux00ddr_getdramclass(u32 *ahbregaddr)
{
	return ((read32(ahbregaddr) >> DRAM_CLASS_OFFSET) & 0xF);
}
static void ux00ddr_phy_fixup(void *ahbregaddr)
{
	void *ddrphyreg = ahbregaddr + 0x2000;

	// bitmask of failed lanes
	u64 fails = 0;
	u32 slicebase = 0;
	u32 dq = 0;

	// check errata condition
	for (u32 slice = 0; slice < 8; slice++) {
		u32 regbase = slicebase + 34;
		for (u32 reg = 0; reg < 4; reg++) {
			u32 updownreg = read32(ddrphyreg + ((regbase + reg) << 2));
			for (u32 bit = 0; bit < 2; bit++) {
				u32 phy_rx_cal_dqn_0_offset;

				if (bit == 0)
					phy_rx_cal_dqn_0_offset = PHY_RX_CAL_DQ0_0_OFFSET;
				else
					phy_rx_cal_dqn_0_offset = PHY_RX_CAL_DQ1_0_OFFSET;

				u32 down = (updownreg >> phy_rx_cal_dqn_0_offset) & 0x3F;
				u32 up = (updownreg >> (phy_rx_cal_dqn_0_offset + 6)) & 0x3F;

				uint8_t failc0 = ((down == 0) && (up == 0x3F));
				uint8_t failc1 = ((up == 0) && (down == 0x3F));

				// print error message on failure
				if (failc0 || failc1) {
					fails |= (1ULL << dq);
					char slicelsc = '0';
					char slicemsc = '0';
					slicelsc += (dq % 10);
					slicemsc += (dq / 10);
					printk(BIOS_ERR, "S %c%c%c\n", slicelsc, slicemsc,
					       failc0 ? 'U' : 'D');
				}
				dq++;
			}
		}
		slicebase += 128;
	}

	if (fails)
		printk(BIOS_ERR, "DDR error in fixing up: %llx\n", fails);
}
extern const u32 denali_ddr_phy_data[1215];
extern const u32 denali_ddr_ctl_data[265];
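
/*
 * Note (editorial): the array sizes match the register ranges written above:
 * phy_reset() writes PHY settings 0..1214 (1215 entries) and
 * ux00ddr_writeregmap() writes controller settings 0..264 (265 entries).
 */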
void sdram_init(size_t dram_size)
{
	u32 *ddrctrl = (u32 *)FU740_DDRCTRL;
	u64 *ddr_physical_filter = (u64 *)FU740_PHYSICAL_FILTER;
	ux00ddr_writeregmap(ddrctrl, denali_ddr_ctl_data, denali_ddr_phy_data);
	ux00ddr_disableaxireadinterleave(ddrctrl);

	ux00ddr_disableoptimalrmodw(ddrctrl);

	ux00ddr_enablewriteleveling(ddrctrl);
	ux00ddr_enablereadleveling(ddrctrl);
	ux00ddr_enablereadlevelinggate(ddrctrl);
	if (ux00ddr_getdramclass(ddrctrl) == DRAM_CLASS_DDR4)
		ux00ddr_enablevreftraining(ddrctrl);

	// mask off interrupts for leveling completion
	ux00ddr_mask_leveling_completed_interrupt(ddrctrl);

	ux00ddr_mask_mc_init_complete_interrupt(ddrctrl);
	ux00ddr_mask_outofrange_interrupts(ddrctrl);
	ux00ddr_setuprangeprotection(ddrctrl, dram_size);
	ux00ddr_mask_port_command_error_interrupt(ddrctrl);

	ux00ddr_start(ddrctrl, ddr_physical_filter, FU740_DRAM + dram_size);

	ux00ddr_phy_fixup(ddrctrl);
}
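
/*
 * Note (editorial): callers are expected to pass the board's total DRAM size,
 * which is used both for the range protection setup and for the end address
 * handed to ux00ddr_start(). A hypothetical romstage call for a 16 GiB board
 * would be sdram_init((size_t)16 * 1024 * 1024 * 1024); sdram_size() below
 * can then recover that size from the physical filter register.
 */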
// sdram_init MUST be called before sdram_size
size_t sdram_size(void)
{
	u64 devicepmp0 = read64((u64 *)FU740_PHYSICAL_FILTER);
	return ((devicepmp0 & 0xFFFFFFFFFFFFFF) << 2) - FU740_DRAM;
}