/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM43XX SoCs
 *
 * Copyright (C) 2013-2018 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach, Vaibhav Bedia
 */
#include <linux/linkage.h>
#include <linux/ti-emif-sram.h>
#include <linux/platform_data/pm33xx.h>
#include <asm/assembler.h>
#include <asm/hardware/cache-l2x0.h>
#include <asm/memory.h>

#include "omap-secure.h"
#include "pm-asm-offsets.h"
/* replicated define because linux/bitops.h cannot be included in assembly */
#define BIT(nr)			(1 << (nr))

#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED		0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE		0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE		0x0002

#define AM43XX_EMIF_POWEROFF_ENABLE			0x1
#define AM43XX_EMIF_POWEROFF_DISABLE			0x0

#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP		0x1
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO		0x3

#define AM43XX_CM_BASE					0x44DF0000

#define AM43XX_CM_REGADDR(inst, reg)				\
	AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))

#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CDOFFS)
#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
#define AM43XX_CM_PER_EMIF_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
					AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#define AM43XX_PRM_EMIF_CTRL_OFFSET			0x0030

#define RTC_SECONDS_REG					0x0
#define RTC_PMIC_REG					0x98
#define RTC_PMIC_POWER_EN				BIT(16)
#define RTC_PMIC_EXT_WAKEUP_STS				BIT(12)
#define RTC_PMIC_EXT_WAKEUP_POL				BIT(4)
#define RTC_PMIC_EXT_WAKEUP_EN				BIT(0)
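/*
 * The RTC_PMIC_* bits are or'ed into the RTC PMIC register on the RTC-only
 * path below so the RTC block can drive PMIC_POWER_EN and arm external
 * wakeup 0 to bring the system back up.
 */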
	.arm
	.arch armv7-a
	.arch_extension sec
	.text
	.align 3

ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/* Save wfi_flags arg to data space */
	mov	r4, r0			@ wfi_flags is passed in r0
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	str	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]
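	/*
	 * wfi_flags is kept in the SRAM data area because the cache flush
	 * helpers called below clobber most working registers; it is
	 * reloaded from there once the L1/L2 maintenance is done.
	 */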
#ifdef CONFIG_CACHE_L2X0
	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0			@ keep the L2 controller base for later
#endif

	/* Only flush cache if we know we are losing MPU context */
	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_flush
	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * the SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C would make all the data accesses
	 * strongly ordered and would not hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * Invalidate L1 and L2 data cache.
	 */
	ldr	r1, kernel_flush
	blx	r1
#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x03		@ disable linefill/write-back (erratum 727915)
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
	mov	r2, r8			@ L2 controller base retrieved earlier
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	/* Save L2 aux ctrl and prefetch ctrl so they can be restored on resume */
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	/* Clean and invalidate all L2 ways, then poll until the operation completes */
	movw	r0, #0xffff		@ all cache ways
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	cmp	r0, #0
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00		@ restore normal linefill/write-back behaviour
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif

	/* Drain the L2 write buffers with a cache sync and wait for completion */
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif
	/* Restore wfi_flags */
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	ldr	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]
cache_skip_flush:
	/*
	 * If we are trying to enter RTC+DDR mode we must perform
	 * a read from the rtc address space to ensure translation
	 * presence in the TLB to avoid page table walk after DDR
	 * is unavailable.
	 */
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_va_refresh

	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]
	ldr	r0, [r1]		@ dummy read to pull the RTC mapping into the TLB

skip_rtc_va_refresh:
	/* Check if we want self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_enter_sr

	adr	r9, am43xx_emif_sram_table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

emif_skip_enter_sr:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SAVE_EMIF
	beq	emif_skip_save

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

emif_skip_save:
	/* Only can disable EMIF if we have entered self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_disable

	/* Disable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

emif_skip_disable:
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_only

	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]

	ldr	r0, [r1, #RTC_PMIC_REG]
	orr	r0, r0, #RTC_PMIC_POWER_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_STS
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_POL
	str	r0, [r1, #RTC_PMIC_REG]
	ldr	r0, [r1, #RTC_PMIC_REG]	@ read back to post the write
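	/*
	 * With PMIC_POWER_EN set and external wakeup 0 armed, the RTC is
	 * expected to drive the PMIC to cut SoC power; the read back above
	 * simply ensures the write has reached the RTC before we start
	 * waiting for the power loss.
	 */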
	/* Wait for 2 seconds to lose power */
	mov	r3, #2
	ldr	r2, [r1, #RTC_SECONDS_REG]
rtc_loop:
	ldr	r0, [r1, #RTC_SECONDS_REG]
	sub	r0, r0, r2
	cmp	r0, r3
	blt	rtc_loop

	/* Power was not cut in time, treat this attempt as aborted */
	b	re_enable_emif

skip_rtc_only:
	tst	r4, #WFI_FLAG_WAKE_M3
	beq	wkup_m3_skip

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED.
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]

wkup_m3_skip:
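	/*
	 * With MODULEMODE disabled and the MPU clock domain in SW_SLEEP, the
	 * WKUP_M3 co-processor sees the MPU execute WFI and can sequence the
	 * actual power-state transition on its behalf.
	 */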
	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. CPU can speculatively
	 * prefetch the instructions so add NOPs after WFI. Sixteen
	 * NOPs as per Cortex-A9 pipeline.
	 */
	wfi

	.rept	16
	nop
	.endr
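	/*
	 * If the power transition actually happens, execution never falls
	 * past the WFI: on wakeup the core re-enters this code at
	 * am43xx_resume_from_deep_sleep. Falling through here means the
	 * transition was aborted.
	 */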
	/* We come here in case of an abort due to a late interrupt */

	/* Set the MPU clock domain back to HW_AUTO */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
re_enable_emif:
	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable
	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_restore

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

cache_skip_restore:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_exit_sr_abt

	adr	r9, am43xx_emif_sram_table
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

emif_skip_exit_sr_abt:
	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)
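/*
 * Offset of the resume handler relative to am43xx_do_wfi; the PM core uses
 * it to locate am43xx_resume_from_deep_sleep inside the SRAM copy of this
 * code and to program that address as the wakeup entry point.
 */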
ENTRY(am43xx_resume_offset)
	.word	. - am43xx_do_wfi
ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]
	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable the EMIF clock module */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1
	adr	r9, am43xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

	ldr	r1, [r9, #EMIF_PM_RUN_HW_LEVELING]
	blx	r1
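	/*
	 * Ordering matters here: the EMIF register context is restored and
	 * self refresh exited before any DDR traffic is generated, then the
	 * poweroff override is released and hardware leveling is re-run.
	 */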
#ifdef CONFIG_CACHE_L2X0
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, r0, #0x1
	cmp	r0, #1
	beq	skip_l2en		@ Skip if already enabled

	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
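	/*
	 * The prefetch and aux ctrl values read back here are meant to be
	 * written to the PL310 through secure monitor services (see the
	 * OMAP4_MON_L2X0_PREFETCH_INDEX / OMAP4_MON_L2X0_AUXCTRL_INDEX words
	 * at the end of this file), since those registers are normally
	 * writable only from the secure side.
	 */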
	/* L2 invalidate on resume */
	movw	r0, #0xffff		@ all cache ways
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait_inv:
	ldr	r0, [r2, #L2X0_INV_WAY]
	cmp	r0, #0
	bne	wait_inv
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif

	/* Drain the L2 with a cache sync and wait for it to complete */
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

	/*
	 * The controller itself still has to be re-enabled through the
	 * secure monitor (OMAP4_MON_L2X0_CTRL_INDEX) before the L2 is used.
	 */
skip_l2en:
#endif
	/* We are back. Branch to the common CPU resume routine */
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)
	.align
kernel_flush:
	.word	v7_flush_dcache_all
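/*
 * The am43xx_virt_* literals below are used on the way down, while the MMU
 * is still on; the am43xx_phys_* literals are used by the resume path, which
 * runs from SRAM with the MMU off.
 */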
am43xx_phys_emif_poweroff:
	.word	(AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
		 AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
	.word	(AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
	.word	(AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
	.word	(AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
	.word	(AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
		 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
	.word	omap4_get_l2cache_base
l2_cache_base:
	.word	OMAP44XX_L2CACHE_BASE
	.word	OMAP4_MON_L2X0_PREFETCH_INDEX
	.word	OMAP4_MON_L2X0_AUXCTRL_INDEX
	.word	OMAP4_MON_L2X0_CTRL_INDEX
#endif
/* DDR related defines */
ENTRY(am43xx_emif_sram_table)
	.space	EMIF_PM_FUNCTIONS_SIZE
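/*
 * Jump table filled in at runtime with the addresses of the SRAM-resident
 * EMIF PM helpers provided by the ti-emif-sram driver; the EMIF_PM_*_OFFSET
 * loads above index into this table.
 */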
ENTRY(am43xx_pm_sram)
	.word	am43xx_do_wfi_sz
	.word	am43xx_resume_offset
	.word	am43xx_emif_sram_table
	.word	am43xx_pm_ro_sram_data
resume_addr:
	.word	cpu_resume - PAGE_OFFSET + 0x80000000
ENTRY(am43xx_pm_ro_sram_data)
	.space	AMX3_PM_RO_SRAM_DATA_SIZE
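/*
 * Data block populated by the pm33xx code before suspend: virtual/physical
 * base pointers and the saved L2 control register values referenced through
 * the AMX3_PM_* offsets above.
 */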
ENTRY(am43xx_do_wfi_sz)
	.word	. - am43xx_do_wfi