4 * Karthik Dasu <karthik-dp@ti.com>
7 * Texas Instruments, <www.ti.com>
8 * Richard Woodruff <r-woodruff2@ti.com>
10 * This program is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU General Public License as
12 * published by the Free Software Foundation; either version 2 of
13 * the License, or (at your option) any later version.
15 * This program is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 * GNU General Public License for more details.
20 * You should have received a copy of the GNU General Public License
21 * along with this program; if not, write to the Free Software
22 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
25 #include <linux/linkage.h>
26 #include <asm/assembler.h>
27 #include <plat/sram.h>
30 #include "cm2xxx_3xxx.h"
31 #include "prm2xxx_3xxx.h"
36 * Registers access definitions
38 #define SDRC_SCRATCHPAD_SEM_OFFS 0xc
39 #define SDRC_SCRATCHPAD_SEM_V OMAP343X_SCRATCHPAD_REGADDR\
40 (SDRC_SCRATCHPAD_SEM_OFFS)
41 #define PM_PREPWSTST_CORE_P OMAP3430_PRM_BASE + CORE_MOD +\
43 #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
44 #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
45 #define CM_IDLEST_CKGEN_V OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
46 #define SRAM_BASE_P OMAP3_SRAM_PA
47 #define CONTROL_STAT OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
48 #define CONTROL_MEM_RTA_CTRL (OMAP343X_CTRL_BASE +\
49 OMAP36XX_CONTROL_MEM_RTA_CTRL)
51 /* Move this as correct place is available */
52 #define SCRATCHPAD_MEM_OFFS 0x310
53 #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE +\
54 OMAP343X_CONTROL_MEM_WKUP +\
56 #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER)
57 #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
58 #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0)
59 #define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0)
60 #define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
61 #define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1)
62 #define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1)
63 #define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
64 #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
65 #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
73 * The "get_*restore_pointer" functions are used to provide a
74 * physical restore address where the ROM code jumps while waking
75 * up from MPU OFF/OSWR state.
76 * The restore pointer is stored into the scratchpad.
80 /* Function call to get the restore pointer for resume from OFF */
81 ENTRY(get_restore_pointer)
82 stmfd sp!, {lr} @ save registers on stack
84 ldmfd sp!, {pc} @ restore regs and return
85 ENTRY(get_restore_pointer_sz)
86 .word . - get_restore_pointer
89 /* Function call to get the restore pointer for 3630 resume from OFF */
90 ENTRY(get_omap3630_restore_pointer)
91 stmfd sp!, {lr} @ save registers on stack
93 ldmfd sp!, {pc} @ restore regs and return
94 ENTRY(get_omap3630_restore_pointer_sz)
95 .word . - get_omap3630_restore_pointer
98 /* Function call to get the restore pointer for ES3 to resume from OFF */
99 ENTRY(get_es3_restore_pointer)
100 stmfd sp!, {lr} @ save registers on stack
102 ldmfd sp!, {pc} @ restore regs and return
103 ENTRY(get_es3_restore_pointer_sz)
104 .word . - get_es3_restore_pointer
108 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
109 * This function sets up a flag that will allow for this toggling to take
110 * place on 3630. Hopefully some version in the future may not need this.
112 ENTRY(enable_omap3630_toggle_l2_on_restore)
113 stmfd sp!, {lr} @ save registers on stack
114 /* Setup so that we will disable and enable l2 */
117 ldmfd sp!, {pc} @ restore regs and return
120 /* Function to call rom code to save secure ram context */
122 ENTRY(save_secure_ram_context)
123 stmfd sp!, {r1-r12, lr} @ save registers on stack
124 adr r3, api_params @ r3 points to parameters
125 str r0, [r3,#0x4] @ r0 has sdram address
128 ldr r12, sram_phy_addr_mask
130 mov r0, #25 @ set service ID for PPA
131 mov r12, r0 @ copy secure service ID in r12
132 mov r1, #0 @ set task id for ROM code in r1
133 mov r2, #4 @ set some flags in r2, r6
135 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
136 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
137 .word 0xE1600071 @ call SMI monitor (smi #1)
142 ldmfd sp!, {r1-r12, pc}
148 .word 0x4, 0x0, 0x0, 0x1, 0x1
149 ENTRY(save_secure_ram_context_sz)
150 .word . - save_secure_ram_context
153 * ======================
154 * == Idle entry point ==
155 * ======================
159 * Forces OMAP into idle state
161 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
162 * and executes the WFI instruction. Calling WFI effectively changes the
163 * power domains states to the desired target power states.
167 * - this code gets copied to internal SRAM at boot and after wake-up
168 * from OFF mode. The execution pointer in SRAM is _omap_sram_idle.
169 * - when the OMAP wakes up it continues at different execution points
170 * depending on the low power mode (non-OFF vs OFF modes),
171 * cf. 'Resume path for xxx mode' comments.
174 ENTRY(omap34xx_cpu_suspend)
175 stmfd sp!, {r0-r12, lr} @ save registers on stack
178 * r0 contains restore pointer in sdram
179 * r1 contains information about saving context:
180 * 0 - No context lost
181 * 1 - Only L1 and logic lost
183 * 3 - Both L1 and L2 lost
186 /* Directly jump to WFI is the context save is not required */
190 /* Otherwise fall through to the save context code */
192 mov r8, r0 @ Store SDRAM address in r8
193 mrc p15, 0, r5, c1, c0, 1 @ Read Auxiliary Control Register
194 mov r4, #0x1 @ Number of parameters for restore call
195 stmia r8!, {r4-r5} @ Push parameters for restore call
196 mrc p15, 1, r5, c9, c0, 2 @ Read L2 AUX ctrl register
197 stmia r8!, {r4-r5} @ Push parameters for restore call
199 /* Check what that target sleep state is from r1 */
200 cmp r1, #0x2 @ Only L2 lost, no need to save context
204 /* Store sp and spsr to SDRAM */
209 /* Save all ARM registers */
210 /* Coprocessor access control register */
211 mrc p15, 0, r6, c1, c0, 2
213 /* TTBR0, TTBR1 and Translation table base control */
214 mrc p15, 0, r4, c2, c0, 0
215 mrc p15, 0, r5, c2, c0, 1
216 mrc p15, 0, r6, c2, c0, 2
219 * Domain access control register, data fault status register,
220 * and instruction fault status register
222 mrc p15, 0, r4, c3, c0, 0
223 mrc p15, 0, r5, c5, c0, 0
224 mrc p15, 0, r6, c5, c0, 1
227 * Data aux fault status register, instruction aux fault status,
228 * data fault address register and instruction fault address register
230 mrc p15, 0, r4, c5, c1, 0
231 mrc p15, 0, r5, c5, c1, 1
232 mrc p15, 0, r6, c6, c0, 0
233 mrc p15, 0, r7, c6, c0, 2
236 * user r/w thread and process ID, user r/o thread and process ID,
237 * priv only thread and process ID, cache size selection
239 mrc p15, 0, r4, c13, c0, 2
240 mrc p15, 0, r5, c13, c0, 3
241 mrc p15, 0, r6, c13, c0, 4
242 mrc p15, 2, r7, c0, c0, 0
244 /* Data TLB lockdown, instruction TLB lockdown registers */
245 mrc p15, 0, r5, c10, c0, 0
246 mrc p15, 0, r6, c10, c0, 1
248 /* Secure or non secure vector base address, FCSE PID, Context PID*/
249 mrc p15, 0, r4, c12, c0, 0
250 mrc p15, 0, r5, c13, c0, 0
251 mrc p15, 0, r6, c13, c0, 1
253 /* Primary remap, normal remap registers */
254 mrc p15, 0, r4, c10, c2, 0
255 mrc p15, 0, r5, c10, c2, 1
258 /* Store current cpsr*/
262 mrc p15, 0, r4, c1, c0, 0
263 /* save control register */
268 * Clean Data or unified cache to POU
269 * How to invalidate only L1 cache???? - #FIX_ME#
270 * mcr p15, 0, r11, c7, c11, 1
272 cmp r1, #0x1 @ Check whether L2 inval is required
277 * jump out to kernel flush routine
278 * - reuse that code is better
279 * - it executes in a cached space so is faster than refetch per-block
280 * - should be faster and will change with kernel
281 * - 'might' have to copy address, load and jump to it
288 ldr r4, sdrc_power @ read the SDRC_POWER register
289 ldr r5, [r4] @ read the contents of SDRC_POWER
290 orr r5, r5, #0x40 @ enable self refresh on idle req
291 str r5, [r4] @ write back to SDRC_POWER register
293 /* Data memory barrier and Data sync barrier */
295 mcr p15, 0, r1, c7, c10, 4
296 mcr p15, 0, r1, c7, c10, 5
299 * ===================================
300 * == WFI instruction => Enter idle ==
301 * ===================================
303 wfi @ wait for interrupt
306 * ===================================
307 * == Resume path for non-OFF modes ==
308 * ===================================
323 * ===================================
324 * == Exit point from non-OFF modes ==
325 * ===================================
327 ldmfd sp!, {r0-r12, pc} @ restore regs and return
331 * ==============================
332 * == Resume path for OFF mode ==
333 * ==============================
337 * The restore_* functions are called by the ROM code
338 * when back from WFI in OFF mode.
339 * Cf. the get_*restore_pointer functions.
341 * restore_es3: applies to 34xx >= ES3.0
342 * restore_3630: applies to 36xx
343 * restore: common code for 3xxx
346 ldr r5, pm_prepwstst_core_p
349 cmp r4, #0x0 @ Check if previous power state of CORE is OFF
353 ldr r2, es3_sdrc_fix_sz
356 ldmia r0!, {r3} @ val = *src
357 stmia r1!, {r3} @ *dst = val
358 subs r2, r2, #0x1 @ num_words--
365 ldr r1, pm_prepwstst_core_p
368 cmp r2, #0x0 @ Check if previous power state of CORE is OFF
370 /* Disable RTA before giving control */
371 ldr r1, control_mem_rta
372 mov r2, #OMAP36XX_RTA_DISABLE
375 /* Fall through to common code for the remaining logic */
379 * Check what was the reason for mpu reset and store the reason in r9:
380 * 0 - No context lost
381 * 1 - Only L1 and logic lost
382 * 2 - Only L2 lost - In this case, we wont be here
383 * 3 - Both L1 and L2 lost
385 ldr r1, pm_pwstctrl_mpu
388 cmp r2, #0x0 @ Check if target power state was OFF or RET
389 moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
390 movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation
394 cmp r0, #0x1 @ should we disable L2 on 3630?
396 mrc p15, 0, r0, c1, c0, 1
397 bic r0, r0, #2 @ disable L2 cache
398 mcr p15, 0, r0, c1, c0, 1
405 mov r0, #40 @ set service ID for PPA
406 mov r12, r0 @ copy secure Service ID in r12
407 mov r1, #0 @ set task id for ROM code in r1
408 mov r2, #4 @ set some flags in r2, r6
410 adr r3, l2_inv_api_params @ r3 points to dummy parameters
411 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
412 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
413 .word 0xE1600071 @ call SMI monitor (smi #1)
414 /* Write to Aux control register to set some bits */
415 mov r0, #42 @ set service ID for PPA
416 mov r12, r0 @ copy secure Service ID in r12
417 mov r1, #0 @ set task id for ROM code in r1
418 mov r2, #4 @ set some flags in r2, r6
420 ldr r4, scratchpad_base
421 ldr r3, [r4, #0xBC] @ r3 points to parameters
422 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
423 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
424 .word 0xE1600071 @ call SMI monitor (smi #1)
426 #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
427 /* Restore L2 aux control register */
428 @ set service ID for PPA
429 mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
430 mov r12, r0 @ copy service ID in r12
431 mov r1, #0 @ set task ID for ROM code in r1
432 mov r2, #4 @ set some flags in r2, r6
434 ldr r4, scratchpad_base
436 adds r3, r3, #8 @ r3 points to parameters
437 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
438 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
439 .word 0xE1600071 @ call SMI monitor (smi #1)
446 /* Execute smi to invalidate L2 cache */
447 mov r12, #0x1 @ set up to invalidate L2
448 .word 0xE1600070 @ Call SMI monitor (smieq)
449 /* Write to Aux control register to set some bits */
450 ldr r4, scratchpad_base
454 .word 0xE1600070 @ Call SMI monitor (smieq)
455 ldr r4, scratchpad_base
459 .word 0xE1600070 @ Call SMI monitor (smieq)
462 cmp r1, #0x1 @ Test if L2 re-enable needed on 3630
464 mrc p15, 0, r1, c1, c0, 1
465 orr r1, r1, #2 @ re-enable L2 cache
466 mcr p15, 0, r1, c1, c0, 1
470 * Invalidate all instruction caches to PoU
471 * and flush branch target cache
473 mcr p15, 0, r1, c7, c5, 0
475 ldr r4, scratchpad_base
484 /* Coprocessor access Control Register */
485 mcr p15, 0, r4, c1, c0, 2
488 MCR p15, 0, r5, c2, c0, 0
490 MCR p15, 0, r6, c2, c0, 1
491 /* Translation table base control register */
492 MCR p15, 0, r7, c2, c0, 2
493 /* Domain access Control Register */
494 MCR p15, 0, r8, c3, c0, 0
495 /* Data fault status Register */
496 MCR p15, 0, r9, c5, c0, 0
499 /* Instruction fault status Register */
500 MCR p15, 0, r4, c5, c0, 1
501 /* Data Auxiliary Fault Status Register */
502 MCR p15, 0, r5, c5, c1, 0
503 /* Instruction Auxiliary Fault Status Register*/
504 MCR p15, 0, r6, c5, c1, 1
505 /* Data Fault Address Register */
506 MCR p15, 0, r7, c6, c0, 0
507 /* Instruction Fault Address Register*/
508 MCR p15, 0, r8, c6, c0, 2
511 /* User r/w thread and process ID */
512 MCR p15, 0, r4, c13, c0, 2
513 /* User ro thread and process ID */
514 MCR p15, 0, r5, c13, c0, 3
515 /* Privileged only thread and process ID */
516 MCR p15, 0, r6, c13, c0, 4
517 /* Cache size selection */
518 MCR p15, 2, r7, c0, c0, 0
520 /* Data TLB lockdown registers */
521 MCR p15, 0, r4, c10, c0, 0
522 /* Instruction TLB lockdown registers */
523 MCR p15, 0, r5, c10, c0, 1
524 /* Secure or Nonsecure Vector Base Address */
525 MCR p15, 0, r6, c12, c0, 0
527 MCR p15, 0, r7, c13, c0, 0
529 MCR p15, 0, r8, c13, c0, 1
532 /* Primary memory remap register */
533 MCR p15, 0, r4, c10, c2, 0
534 /* Normal memory remap register */
535 MCR p15, 0, r5, c10, c2, 1
538 ldmia r3!,{r4} @ load CPSR from SDRAM
539 msr cpsr, r4 @ store cpsr
541 /* Enabling MMU here */
542 mrc p15, 0, r7, c2, c0, 2 @ Read TTBRControl
543 /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
549 * More work needs to be done to support N[0:2] value other than 0
550 * So looping here so that the error can be detected
554 mrc p15, 0, r2, c2, c0, 0
558 ldr r5, table_index_mask
559 and r4, r5 @ r4 = 31 to 20 bits of pc
560 /* Extract the value to be written to table entry */
562 /* r1 has the value to be written to table entry*/
564 /* Getting the address of table entry to modify */
566 /* r2 has the location which needs to be modified */
568 /* Storing previous entry of location being modified */
569 ldr r5, scratchpad_base
572 /* Modify the table entry */
575 * Storing address of entry being modified
576 * - will be restored after enabling MMU
578 ldr r5, scratchpad_base
582 mcr p15, 0, r0, c7, c5, 4 @ Flush prefetch buffer
583 mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
584 mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
585 mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
587 * Restore control register. This enables the MMU.
588 * The caches and prediction are not enabled here, they
589 * will be enabled after restoring the MMU table entry.
592 /* Store previous value of control register in scratchpad */
594 ldr r2, cache_pred_disable_mask
596 mcr p15, 0, r4, c1, c0, 0
599 * ==============================
600 * == Exit point from OFF mode ==
601 * ==============================
603 ldmfd sp!, {r0-r12, pc} @ restore regs and return
610 /* This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0 */
613 ldr r4, sdrc_syscfg @ get config addr
614 ldr r5, [r4] @ get value
615 tst r5, #0x100 @ is part access blocked
617 biceq r5, r5, #0x100 @ clear bit if set
618 str r5, [r4] @ write back change
619 ldr r4, sdrc_mr_0 @ get config addr
620 ldr r5, [r4] @ get value
621 str r5, [r4] @ write back change
622 ldr r4, sdrc_emr2_0 @ get config addr
623 ldr r5, [r4] @ get value
624 str r5, [r4] @ write back change
625 ldr r4, sdrc_manual_0 @ get config addr
626 mov r5, #0x2 @ autorefresh command
627 str r5, [r4] @ kick off refreshes
628 ldr r4, sdrc_mr_1 @ get config addr
629 ldr r5, [r4] @ get value
630 str r5, [r4] @ write back change
631 ldr r4, sdrc_emr2_1 @ get config addr
632 ldr r5, [r4] @ get value
633 str r5, [r4] @ write back change
634 ldr r4, sdrc_manual_1 @ get config addr
635 mov r5, #0x2 @ autorefresh command
636 str r5, [r4] @ kick off refreshes
640 .word SDRC_SYSCONFIG_P
646 .word SDRC_MANUAL_0_P
652 .word SDRC_MANUAL_1_P
653 ENTRY(es3_sdrc_fix_sz)
654 .word . - es3_sdrc_fix
657 * This function implements the erratum ID i581 WA:
658 * SDRC state restore before accessing the SDRAM
660 * Only used at return from non-OFF mode. For OFF
661 * mode the ROM code configures the SDRC and
662 * the DPLL before calling the restore code directly
666 /* Make sure SDRC accesses are ok */
669 /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
670 ldr r4, cm_idlest_ckgen
676 ldr r4, cm_idlest1_core
681 /* allow DLL powerdown upon hw idle req */
688 /* Is dll in lock mode? */
689 ldr r4, sdrc_dlla_ctrl
692 bxne lr @ Return if locked
693 /* wait till dll locks */
695 ldr r4, wait_dll_lock_counter
697 str r4, wait_dll_lock_counter
698 ldr r4, sdrc_dlla_status
699 /* Wait 20uS for lock */
708 bx lr @ Return when locked
710 /* disable/reenable DLL if not locked */
712 ldr r4, sdrc_dlla_ctrl
715 bic r6, #(1<<3) @ disable dll
718 orr r6, r6, #(1<<3) @ enable dll
724 b wait_dll_lock_timed
727 .word CM_IDLEST1_CORE_V
729 .word CM_IDLEST_CKGEN_V
731 .word SDRC_DLLA_STATUS_V
733 .word SDRC_DLLA_CTRL_V
735 .word PM_PREPWSTST_CORE_P
737 .word PM_PWSTCTRL_MPU_P
739 .word SCRATCHPAD_BASE_P
741 .word SRAM_BASE_P + 0x8000
750 cache_pred_disable_mask:
755 .word CONTROL_MEM_RTA_CTRL
757 .word v7_flush_dcache_all
761 * When exporting to userspace while the counters are in SRAM,
762 * these 2 words need to be at the end to facilitate retrival!
766 wait_dll_lock_counter:
769 ENTRY(omap34xx_cpu_suspend_sz)
770 .word . - omap34xx_cpu_suspend