4 * Karthik Dasu <karthik-dp@ti.com>
7 * Texas Instruments, <www.ti.com>
8 * Richard Woodruff <r-woodruff2@ti.com>
10 * This program is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU General Public License as
12 * published by the Free Software Foundation; either version 2 of
13 * the License, or (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
20 * You should have received a copy of the GNU General Public License
21 * along with this program; if not, write to the Free Software
22 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
25 #include <linux/linkage.h>
26 #include <asm/assembler.h>
27 #include <plat/sram.h>
30 #include "cm2xxx_3xxx.h"
31 #include "prm2xxx_3xxx.h"
36 * Registers access definitions
38 #define SDRC_SCRATCHPAD_SEM_OFFS 0xc
39 #define SDRC_SCRATCHPAD_SEM_V OMAP343X_SCRATCHPAD_REGADDR\
40 (SDRC_SCRATCHPAD_SEM_OFFS)
41 #define PM_PREPWSTST_CORE_P OMAP3430_PRM_BASE + CORE_MOD +\
43 #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
44 #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
45 #define CM_IDLEST_CKGEN_V OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
46 #define SRAM_BASE_P OMAP3_SRAM_PA
47 #define CONTROL_STAT OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
48 #define CONTROL_MEM_RTA_CTRL (OMAP343X_CTRL_BASE +\
49 OMAP36XX_CONTROL_MEM_RTA_CTRL)
51 /* Move this when a correct place becomes available */
52 #define SCRATCHPAD_MEM_OFFS 0x310
53 #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE +\
54 OMAP343X_CONTROL_MEM_WKUP +\
56 #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER)
57 #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
58 #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0)
59 #define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0)
60 #define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
61 #define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1)
62 #define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1)
63 #define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
64 #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
65 #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
68 * This file needs be built unconditionally as ARM to interoperate correctly
69 * with non-Thumb-2-capable firmware.
78 * The "get_*restore_pointer" functions are used to provide a
79 * physical restore address where the ROM code jumps while waking
80 * up from MPU OFF/OSWR state.
81 * The restore pointer is stored into the scratchpad.
85 /* Function call to get the restore pointer for resume from OFF */
/*
 * get_restore_pointer: returns the physical address of the common
 * "restore" wake-up entry point (to be stored in the scratchpad for
 * the ROM code to jump to after MPU OFF/OSWR) -- see the comment
 * block above the get_*restore_pointer group.
 */
86 ENTRY(get_restore_pointer)
87 stmfd sp!, {lr} @ save registers on stack
@ NOTE(review): the instruction that loads the restore address (the
@ original line between these two) is not visible in this chunk --
@ the body appears truncated; verify against the full source.
89 ldmfd sp!, {pc} @ restore regs and return
90 ENDPROC(get_restore_pointer)
/* Size (in bytes) of get_restore_pointer, used when copying it around */
92 ENTRY(get_restore_pointer_sz)
93 .word . - get_restore_pointer
96 /* Function call to get the restore pointer for 3630 resume from OFF */
/*
 * get_omap3630_restore_pointer: same contract as get_restore_pointer,
 * but for the 3630-specific resume-from-OFF entry point.
 */
97 ENTRY(get_omap3630_restore_pointer)
98 stmfd sp!, {lr} @ save registers on stack
@ NOTE(review): the load of the 3630 restore address is missing from
@ this view (line-number gap); body appears truncated.
100 ldmfd sp!, {pc} @ restore regs and return
101 ENDPROC(get_omap3630_restore_pointer)
/* Size (in bytes) of get_omap3630_restore_pointer */
103 ENTRY(get_omap3630_restore_pointer_sz)
104 .word . - get_omap3630_restore_pointer
107 /* Function call to get the restore pointer for ES3 to resume from OFF */
/*
 * get_es3_restore_pointer: same contract as get_restore_pointer,
 * but for the 34xx ES3.x resume-from-OFF entry point (which also
 * runs the es3_sdrc_fix erratum workaround).
 */
108 ENTRY(get_es3_restore_pointer)
109 stmfd sp!, {lr} @ save registers on stack
@ NOTE(review): the load of the ES3 restore address is missing from
@ this view (line-number gap); body appears truncated.
111 ldmfd sp!, {pc} @ restore regs and return
112 ENDPROC(get_es3_restore_pointer)
/* Size (in bytes) of get_es3_restore_pointer */
114 ENTRY(get_es3_restore_pointer_sz)
115 .word . - get_es3_restore_pointer
119 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
120 * This function sets up a flag that will allow for this toggling to take
121 * place on 3630. Hopefully some version in the future may not need this.
/*
 * enable_omap3630_toggle_l2_on_restore: arms the l2dis_3630 flag so
 * the restore path will disable/re-enable L2 around the wake-up
 * sequence (needed for stable OFF mode on 3630 -- see comment above).
 */
123 ENTRY(enable_omap3630_toggle_l2_on_restore)
124 stmfd sp!, {lr} @ save registers on stack
125 /* Setup so that we will disable and enable l2 */
127 adrl r2, l2dis_3630 @ may be too distant for plain adr
@ NOTE(review): the store of the flag value to [r2] is not visible in
@ this chunk (line-number gap); body appears truncated.
129 ldmfd sp!, {pc} @ restore regs and return
130 ENDPROC(enable_omap3630_toggle_l2_on_restore)
133 /* Function to call rom code to save secure ram context */
/*
 * save_secure_ram_context: asks the secure-world ROM code (via smc)
 * to save the secure RAM context to the SDRAM buffer passed in r0.
 * Uses PPA service ID 25; parameters are passed through the
 * api_params block below.
 * NOTE(review): several lines are missing from this view (gaps in
 * the embedded line numbers), including the label for the parameter
 * block and the code between the r12 load and the service-ID setup.
 */
135 ENTRY(save_secure_ram_context)
136 stmfd sp!, {r1-r12, lr} @ save registers on stack
137 adr r3, api_params @ r3 points to parameters
138 str r0, [r3,#0x4] @ r0 has sdram address
141 ldr r12, sram_phy_addr_mask
@ NOTE(review): r12 is overwritten below before any visible use of
@ sram_phy_addr_mask -- intervening lines are missing from this view.
143 mov r0, #25 @ set service ID for PPA
144 mov r12, r0 @ copy secure service ID in r12
145 mov r1, #0 @ set task id for ROM code in r1
146 mov r2, #4 @ set some flags in r2, r6
148 dsb @ data write barrier
149 dmb @ data memory barrier
150 smc #1 @ call SMI monitor (smi #1)
155 ldmfd sp!, {r1-r12, pc}
@ Parameter words for the ROM call; the api_params: label itself is
@ not visible in this chunk.
162 .word 0x4, 0x0, 0x0, 0x1, 0x1
163 ENDPROC(save_secure_ram_context)
/* Size (in bytes) of save_secure_ram_context */
164 ENTRY(save_secure_ram_context_sz)
165 .word . - save_secure_ram_context
168 * ======================
169 * == Idle entry point ==
170 * ======================
174 * Forces OMAP into idle state
176 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
177 * and executes the WFI instruction. Calling WFI effectively changes the
178 * power domains states to the desired target power states.
182 * - this code gets copied to internal SRAM at boot and after wake-up
183 * from OFF mode. The execution pointer in SRAM is _omap_sram_idle.
184 * - when the OMAP wakes up it continues at different execution points
185 * depending on the low power mode (non-OFF vs OFF modes),
186 * cf. 'Resume path for xxx mode' comments.
/*
 * omap34xx_cpu_suspend -- idle entry, context save, WFI, and the
 * resume path for non-OFF modes. This block is copied to SRAM and
 * executed there; see the header comment above.
 * NOTE(review): this chunk is incomplete -- branch targets, labels
 * and several instructions are missing (gaps in the embedded line
 * numbers). Do not treat the visible sequence as contiguous.
 */
189 ENTRY(omap34xx_cpu_suspend)
190 stmfd sp!, {r0-r12, lr} @ save registers on stack
193 * r0 contains CPU context save/restore pointer in sdram
194 * r1 contains information about saving context:
195 * 0 - No context lost
196 * 1 - Only L1 and logic lost
197 * 2 - Only L2 lost (Even L1 is retained we clean it along with L2)
198 * 3 - Both L1 and L2 lost and logic lost
201 /* Jump directly to WFI if the context save is not required */
205 /* Otherwise fall through to the save context code */
207 mov r8, r0 @ Store SDRAM address in r8
208 mrc p15, 0, r5, c1, c0, 1 @ Read Auxiliary Control Register
209 mov r4, #0x1 @ Number of parameters for restore call
210 stmia r8!, {r4-r5} @ Push parameters for restore call
211 mrc p15, 1, r5, c9, c0, 2 @ Read L2 AUX ctrl register
212 stmia r8!, {r4-r5} @ Push parameters for restore call
214 /* Check what the target sleep state is from r1 */
215 cmp r1, #0x2 @ Only L2 lost, no need to save context
@ Save banked/system state needed to rebuild the CPU after OFF:
219 mov r4, sp @ Store sp
220 mrs r5, spsr @ Store spsr
221 mov r6, lr @ Store lr
224 mrc p15, 0, r4, c1, c0, 2 @ Coprocessor access control register
225 mrc p15, 0, r5, c2, c0, 0 @ TTBR0
226 mrc p15, 0, r6, c2, c0, 1 @ TTBR1
227 mrc p15, 0, r7, c2, c0, 2 @ TTBCR
230 mrc p15, 0, r4, c3, c0, 0 @ Domain access Control Register
231 mrc p15, 0, r5, c10, c2, 0 @ PRRR
232 mrc p15, 0, r6, c10, c2, 1 @ NMRR
235 mrc p15, 0, r4, c13, c0, 1 @ Context ID
236 mrc p15, 0, r5, c13, c0, 2 @ User r/w thread and process ID
237 mrc p15, 0, r6, c12, c0, 0 @ Secure or NS vector base address
238 mrs r7, cpsr @ Store current cpsr
241 mrc p15, 0, r4, c1, c0, 0 @ save control register
246 * jump out to kernel flush routine
247 * - reuse that code is better
248 * - it executes in a cached space so is faster than refetch per-block
249 * - should be faster and will change with kernel
250 * - 'might' have to copy address, load and jump to it
251 * Flush all data from the L1 data cache before disabling
259 * Clear the SCTLR.C bit to prevent further data cache
260 * allocation. Clearing SCTLR.C would make all the data accesses
261 * strongly ordered and would not hit the cache.
263 mrc p15, 0, r0, c1, c0, 0
264 bic r0, r0, #(1 << 2) @ Disable the C bit
265 mcr p15, 0, r0, c1, c0, 0
269 * Invalidate L1 data cache. Even though only invalidate is
270 * necessary exported flush API is used here. Doing clean
271 * on already clean cache would be almost NOP.
276 * The kernel doesn't interwork: v7_flush_dcache_all in particular will
277 * always return in Thumb state when CONFIG_THUMB2_KERNEL is enabled.
278 * This sequence switches back to ARM. Note that .align may insert a
279 * nop: bx pc needs to be word-aligned in order to work.
@ Put SDRAM into self-refresh on idle request before entering WFI:
288 ldr r4, sdrc_power @ read the SDRC_POWER register
289 ldr r5, [r4] @ read the contents of SDRC_POWER
290 orr r5, r5, #0x40 @ enable self refresh on idle req
291 str r5, [r4] @ write back to SDRC_POWER register
293 /* Data memory barrier and Data sync barrier */
298 * ===================================
299 * == WFI instruction => Enter idle ==
300 * ===================================
302 wfi @ wait for interrupt
305 * ===================================
306 * == Resume path for non-OFF modes ==
307 * ===================================
@ Re-enable the data cache (SCTLR.C) if we cleared it before WFI:
321 mrc p15, 0, r0, c1, c0, 0
322 tst r0, #(1 << 2) @ Check C bit enabled?
323 orreq r0, r0, #(1 << 2) @ Enable the C bit if cleared
324 mcreq p15, 0, r0, c1, c0, 0
328 * ===================================
329 * == Exit point from non-OFF modes ==
330 * ===================================
332 ldmfd sp!, {r0-r12, pc} @ restore regs and return
336 * ==============================
337 * == Resume path for OFF mode ==
338 * ==============================
342 * The restore_* functions are called by the ROM code
343 * when back from WFI in OFF mode.
344 * Cf. the get_*restore_pointer functions.
346 * restore_es3: applies to 34xx >= ES3.0
347 * restore_3630: applies to 36xx
348 * restore: common code for 3xxx
@ NOTE(review): the restore_es3:/restore_3630:/restore: labels and many
@ branches are missing from this view (line-number gaps); the visible
@ instructions are NOT contiguous.
@ restore_es3 path: if CORE was OFF, copy es3_sdrc_fix into SRAM so the
@ i443 workaround can run from there.
351 ldr r5, pm_prepwstst_core_p
354 cmp r4, #0x0 @ Check if previous power state of CORE is OFF
358 ldr r2, es3_sdrc_fix_sz
@ word-by-word copy loop (src in r0, dst in r1, word count in r2):
361 ldmia r0!, {r3} @ val = *src
362 stmia r1!, {r3} @ *dst = val
363 subs r2, r2, #0x1 @ num_words--
@ restore_3630 path: disable RTA (Retention-Till-Access) before the
@ common restore code runs.
370 ldr r1, pm_prepwstst_core_p
373 cmp r2, #0x0 @ Check if previous power state of CORE is OFF
375 /* Disable RTA before giving control */
376 ldr r1, control_mem_rta
377 mov r2, #OMAP36XX_RTA_DISABLE
380 /* Fall through to common code for the remaining logic */
384 * Check what was the reason for mpu reset and store the reason in r9:
385 * 0 - No context lost
386 * 1 - Only L1 and logic lost
387 * 2 - Only L2 lost - In this case, we won't be here
388 * 3 - Both L1 and L2 lost
390 ldr r1, pm_pwstctrl_mpu
393 cmp r2, #0x0 @ Check if target power state was OFF or RET
394 moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
395 movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation
@ 3630 L2 toggle workaround: disable L2 (ACTLR bit 1) if flagged.
399 cmp r0, #0x1 @ should we disable L2 on 3630?
401 mrc p15, 0, r0, c1, c0, 1
402 bic r0, r0, #2 @ disable L2 cache
403 mcr p15, 0, r0, c1, c0, 1
@ HS/EMU device path: invalidate L2 via PPA service 40, then restore
@ the Aux control register via PPA service 42 (params from scratchpad).
410 mov r0, #40 @ set service ID for PPA
411 mov r12, r0 @ copy secure Service ID in r12
412 mov r1, #0 @ set task id for ROM code in r1
413 mov r2, #4 @ set some flags in r2, r6
415 adr r3, l2_inv_api_params @ r3 points to dummy parameters
416 dsb @ data write barrier
417 dmb @ data memory barrier
418 smc #1 @ call SMI monitor (smi #1)
419 /* Write to Aux control register to set some bits */
420 mov r0, #42 @ set service ID for PPA
421 mov r12, r0 @ copy secure Service ID in r12
422 mov r1, #0 @ set task id for ROM code in r1
423 mov r2, #4 @ set some flags in r2, r6
425 ldr r4, scratchpad_base
426 ldr r3, [r4, #0xBC] @ r3 points to parameters
427 dsb @ data write barrier
428 dmb @ data memory barrier
429 smc #1 @ call SMI monitor (smi #1)
431 #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
432 /* Restore L2 aux control register */
433 @ set service ID for PPA
434 mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
435 mov r12, r0 @ copy service ID in r12
436 mov r1, #0 @ set task ID for ROM code in r1
437 mov r2, #4 @ set some flags in r2, r6
439 ldr r4, scratchpad_base
441 adds r3, r3, #8 @ r3 points to parameters
442 dsb @ data write barrier
443 dmb @ data memory barrier
444 smc #1 @ call SMI monitor (smi #1)
@ GP device path: same operations via smc #0 instead of the PPA.
452 /* Execute smi to invalidate L2 cache */
453 mov r12, #0x1 @ set up to invalidate L2
454 smc #0 @ Call SMI monitor (smieq)
455 /* Write to Aux control register to set some bits */
456 ldr r4, scratchpad_base
460 smc #0 @ Call SMI monitor (smieq)
461 ldr r4, scratchpad_base
465 smc #0 @ Call SMI monitor (smieq)
@ 3630 L2 toggle workaround, part 2: re-enable L2 if we disabled it.
468 cmp r1, #0x1 @ Test if L2 re-enable needed on 3630
470 mrc p15, 0, r1, c1, c0, 1
471 orr r1, r1, #2 @ re-enable L2 cache
472 mcr p15, 0, r1, c1, c0, 1
476 * Invalidate all instruction caches to PoU
477 * and flush branch target cache
479 mcr p15, 0, r1, c7, c5, 0
@ Reload the CPU state saved before WFI from the scratchpad area:
481 ldr r4, scratchpad_base
486 mov sp, r4 @ Restore sp
487 msr spsr_cxsf, r5 @ Restore spsr
488 mov lr, r6 @ Restore lr
491 mcr p15, 0, r4, c1, c0, 2 @ Coprocessor access Control Register
492 mcr p15, 0, r5, c2, c0, 0 @ TTBR0
493 mcr p15, 0, r6, c2, c0, 1 @ TTBR1
494 mcr p15, 0, r7, c2, c0, 2 @ TTBCR
497 mcr p15, 0, r4, c3, c0, 0 @ Domain access Control Register
498 mcr p15, 0, r5, c10, c2, 0 @ PRRR
499 mcr p15, 0, r6, c10, c2, 1 @ NMRR
503 mcr p15, 0, r4, c13, c0, 1 @ Context ID
504 mcr p15, 0, r5, c13, c0, 2 @ User r/w thread and process ID
@ NOTE(review): this is mrc (read) while every neighbour in this
@ restore sequence is mcr (write) -- looks like a transcription error;
@ confirm against the full source before relying on it.
505 mrc p15, 0, r6, c12, c0, 0 @ Secure or NS vector base address
506 msr cpsr, r7 @ restore cpsr
508 /* Enabling MMU here */
509 mrc p15, 0, r7, c2, c0, 2 @ Read TTBRControl
510 /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
516 * More work needs to be done to support N[0:2] value other than 0
517 * So looping here so that the error can be detected
@ Build a temporary 1:1 (identity) section mapping for the page the
@ code runs from, so execution survives the MMU being switched on:
521 mrc p15, 0, r2, c2, c0, 0
525 ldr r5, table_index_mask
526 and r4, r5 @ r4 = 31 to 20 bits of pc
527 /* Extract the value to be written to table entry */
529 /* r1 has the value to be written to table entry*/
531 /* Getting the address of table entry to modify */
533 /* r2 has the location which needs to be modified */
535 /* Storing previous entry of location being modified */
536 ldr r5, scratchpad_base
539 /* Modify the table entry */
542 * Storing address of entry being modified
543 * - will be restored after enabling MMU
545 ldr r5, scratchpad_base
549 mcr p15, 0, r0, c7, c5, 4 @ Flush prefetch buffer
550 mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
551 mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
552 mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
554 * Restore control register. This enables the MMU.
555 * The caches and prediction are not enabled here, they
556 * will be enabled after restoring the MMU table entry.
559 /* Store previous value of control register in scratchpad */
561 ldr r2, cache_pred_disable_mask
563 mcr p15, 0, r4, c1, c0, 0
566 ldr r0, =restoremmu_on
570 * ==============================
571 * == Exit point from OFF mode ==
572 * ==============================
575 ldmfd sp!, {r0-r12, pc} @ restore regs and return
582 /* This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0 */
/*
 * es3_sdrc_fix: kick the SDRC back to life after CORE OFF -- unblock
 * register access, rewrite MR/EMR2 for both chip-selects, and issue
 * manual autorefresh commands. Copied to SRAM and run from there
 * (see the copy loop in the restore_es3 path).
 * NOTE(review): the ENTRY(es3_sdrc_fix) line and the literal-pool
 * labels (sdrc_syscfg:, sdrc_mr_0:, ...) are not visible in this
 * chunk; only some of the pool .word values appear below.
 */
586 ldr r4, sdrc_syscfg @ get config addr
587 ldr r5, [r4] @ get value
588 tst r5, #0x100 @ is part access blocked
590 biceq r5, r5, #0x100 @ clear bit if set
591 str r5, [r4] @ write back change
592 ldr r4, sdrc_mr_0 @ get config addr
593 ldr r5, [r4] @ get value
594 str r5, [r4] @ write back change
595 ldr r4, sdrc_emr2_0 @ get config addr
596 ldr r5, [r4] @ get value
597 str r5, [r4] @ write back change
598 ldr r4, sdrc_manual_0 @ get config addr
599 mov r5, #0x2 @ autorefresh command
600 str r5, [r4] @ kick off refreshes
601 ldr r4, sdrc_mr_1 @ get config addr
602 ldr r5, [r4] @ get value
603 str r5, [r4] @ write back change
604 ldr r4, sdrc_emr2_1 @ get config addr
605 ldr r5, [r4] @ get value
606 str r5, [r4] @ write back change
607 ldr r4, sdrc_manual_1 @ get config addr
608 mov r5, #0x2 @ autorefresh command
609 str r5, [r4] @ kick off refreshes
@ Literal pool (labels missing from this view):
614 .word SDRC_SYSCONFIG_P
620 .word SDRC_MANUAL_0_P
626 .word SDRC_MANUAL_1_P
627 ENDPROC(es3_sdrc_fix)
/* Size (in bytes) of es3_sdrc_fix, used by the SRAM copy loop */
628 ENTRY(es3_sdrc_fix_sz)
629 .word . - es3_sdrc_fix
632 * This function implements the erratum ID i581 WA:
633 * SDRC state restore before accessing the SDRAM
635 * Only used at return from non-OFF mode. For OFF
636 * mode the ROM code configures the SDRC and
637 * the DPLL before calling the restore code directly
/*
 * wait_sdrc_ok / wait_dll_lock_timed: spin until DPLL3 is locked, the
 * CORE domain is active, and the SDRC DLL reports locked -- kicking
 * (disable/re-enable) the DLL if it fails to lock in time.
 * NOTE(review): several labels (wait_sdrc_ok:, wait_dll_lock_timed:,
 * kick_dll:, most literal-pool labels) and branch instructions are
 * missing from this view; the visible lines are NOT contiguous.
 */
641 /* Make sure SDRC accesses are ok */
644 /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
645 ldr r4, cm_idlest_ckgen
651 ldr r4, cm_idlest1_core
656 /* allow DLL powerdown upon hw idle req */
663 * PC-relative stores lead to undefined behaviour in Thumb-2: use r7 as a
665 * Be careful not to clobber r7 when maintaining this code.
669 /* Is dll in lock mode? */
670 ldr r4, sdrc_dlla_ctrl
673 bxne lr @ Return if locked
674 /* wait till dll locks */
@ Bump the wait_dll_lock_counter statistic (r7 = base for stores):
677 ldr r4, wait_dll_lock_counter
679 str r4, [r7, #wait_dll_lock_counter - kick_counter]
680 ldr r4, sdrc_dlla_status
681 /* Wait 20uS for lock */
690 bx lr @ Return when locked
692 /* disable/reenable DLL if not locked */
694 ldr r4, sdrc_dlla_ctrl
697 bic r6, #(1<<3) @ disable dll
700 orr r6, r6, #(1<<3) @ enable dll
@ Count the kick and retry the timed wait:
705 str r4, [r7] @ kick_counter
706 b wait_dll_lock_timed
@ Literal pool for the SRAM-resident code (most labels missing from
@ this view; values reference the #defines at the top of the file):
710 .word CM_IDLEST1_CORE_V
712 .word CM_IDLEST_CKGEN_V
714 .word SDRC_DLLA_STATUS_V
716 .word SDRC_DLLA_CTRL_V
718 .word PM_PREPWSTST_CORE_P
720 .word PM_PWSTCTRL_MPU_P
722 .word SCRATCHPAD_BASE_P
724 .word SRAM_BASE_P + 0x8000
733 cache_pred_disable_mask:
@ NOTE(review): the .word value for cache_pred_disable_mask is not
@ visible here (line-number gap).
738 .word CONTROL_MEM_RTA_CTRL
740 .word v7_flush_dcache_all
744 * When exporting to userspace while the counters are in SRAM,
745 * these 2 words need to be at the end to facilitate retrieval!
749 wait_dll_lock_counter:
751 ENDPROC(omap34xx_cpu_suspend)
/* Size (in bytes) of the whole suspend blob copied to SRAM */
753 ENTRY(omap34xx_cpu_suspend_sz)
754 .word . - omap34xx_cpu_suspend