1 /* $OpenBSD: pxa2x0_apm_asm.S,v 1.4 2007/11/02 05:18:25 miod Exp $ */
4 * Copyright (c) 2005 Uwe Stuehler <uwe@openbsd.org>
6 * Permission to use, copy, modify, and distribute this software for any
7 * purpose with or without fee is hereby granted, provided that the above
8 * copyright notice and this permission notice appear in all copies.
10 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
11 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
12 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
13 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
14 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
15 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
16 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
19 #include <machine/asm.h>
20 #include <machine/cpu.h>
22 #include <arch/arm/xscale/pxa2x0reg.h>
23 #include <arch/arm/sa11x0/sa11x0_reg.h>
25 /* XXX replace with values defined elsewhere. */
/* XScale data cache geometry, used by the cache-clean loop in
 * pxa2x0_cpu_suspend below. */
26 #define DCACHE_CACHELINECOUNT 1024
27 #define CACHELINESIZE 32
28 #define DCACHE_SIZE (CACHELINESIZE * DCACHE_CACHELINECOUNT)
/* Bits of the value written to the CP14 CLKCFG register (cr6). */
31 #define CLKCFG_T (1<<0) /* turbo */
32 #define CLKCFG_F (1<<1) /* frequency change */
33 #define CLKCFG_HT (1<<2) /* half-turbo */
34 #define CLKCFG_B (1<<3) /* fast-bus */
/* Power mode values written to the CP14 PWRMODE register (cr7). */
37 #define PWRMODE_NORMAL (0<<0)
38 #define PWRMODE_IDLE (1<<0)
39 #define PWRMODE_STANDBY (2<<0)
40 #define PWRMODE_SLEEP (3<<0)
41 #define PWRMODE_DEEP_SLEEP (7<<0)
/* Precomputed MDREFR (SDRAM refresh control) values.
 * NOTE(review): the _C3000 suffix and the "XXX C3000-specific" comment
 * further down suggest these are Zaurus C3000 board-specific -- confirm
 * before reuse on another board. */
44 #define MDREFR_C3000 (MDREFR_K0DB2|MDREFR_E1PIN|MDREFR_K1RUN|\
45 MDREFR_K1DB2|MDREFR_K2DB2|MDREFR_APD)
46 #define MDREFR_DRI_91MHZ (0x13<<0)
47 #define MDREFR_HIGH (MDREFR_C3000 | 0x030)
48 #define MDREFR_LOW (MDREFR_C3000 | 0x00b)
49 #define MDREFR_SPEED_91 (MDREFR_C3000 | MDREFR_DRI_91MHZ)
50 #define MDREFR_SPEED_LOW (MDREFR_C3000 | 0x017)
52 ( 7 << MSC_RRR_SHIFT << 16) | \
53 (15 << MSC_RDN_SHIFT << 16) | \
54 (15 << MSC_RDF_SHIFT << 16) | \
55 (MSC_RT_NONBURST << 16) | \
56 ( 2 << MSC_RRR_SHIFT) | \
57 (13 << MSC_RDN_SHIFT) | \
58 (13 << MSC_RDF_SHIFT) | \
59 MSC_RBW /* PXA271 */ | \
62 ( 7 << MSC_RRR_SHIFT << 16) | \
63 (15 << MSC_RDN_SHIFT << 16) | \
64 (15 << MSC_RDF_SHIFT << 16) | \
65 (MSC_RT_VLIO << 16) | \
66 ( 3 << MSC_RRR_SHIFT) | \
67 ( 4 << MSC_RDN_SHIFT) | \
68 (13 << MSC_RDF_SHIFT) | \
71 ( 7 << MSC_RRR_SHIFT << 16) | \
72 (15 << MSC_RDN_SHIFT << 16) | \
73 (15 << MSC_RDF_SHIFT << 16) | \
74 (MSC_RT_NONBURST << 16) | \
75 ( 3 << MSC_RRR_SHIFT) | \
76 ( 4 << MSC_RDN_SHIFT) | \
77 (13 << MSC_RDF_SHIFT) | \
80 ( 7 << MSC_RRR_SHIFT << 16) | \
81 (15 << MSC_RDN_SHIFT << 16) | \
82 (15 << MSC_RDF_SHIFT << 16) | \
83 (MSC_RT_NONBURST << 16) | \
84 ( 1 << MSC_RRR_SHIFT) | \
85 ( 8 << MSC_RDN_SHIFT) | \
86 ( 8 << MSC_RDF_SHIFT) | \
87 MSC_RBW /* PXA271 */ | \
90 ( 7 << MSC_RRR_SHIFT << 16) | \
91 (15 << MSC_RDN_SHIFT << 16) | \
92 (15 << MSC_RDF_SHIFT << 16) | \
93 (MSC_RT_VLIO << 16) | \
94 ( 1 << MSC_RRR_SHIFT) | \
95 ( 2 << MSC_RDN_SHIFT) | \
96 ( 6 << MSC_RDF_SHIFT) | \
99 ( 7 << MSC_RRR_SHIFT << 16) | \
100 (15 << MSC_RDN_SHIFT << 16) | \
101 (15 << MSC_RDF_SHIFT << 16) | \
102 (MSC_RT_NONBURST << 16) | \
103 ( 1 << MSC_RRR_SHIFT) | \
104 ( 2 << MSC_RDN_SHIFT) | \
105 ( 6 << MSC_RDF_SHIFT) | \
/*
 * Literal pool: kernel symbol addresses and precomputed configuration
 * words, loaded PC-relative by the routines below.
 * NOTE(review): the embedded original line numbers are discontiguous;
 * some labels (e.g. the .Lvector_page label that should precede the
 * first .word, per the "ldr r2, .Lvector_page" reference below) are
 * in lines missing from this view.
 */
109 .global _C_LABEL(vector_page)
110 .global _C_LABEL(xscale_cache_clean_addr)
111 .global _C_LABEL(pxa2x0_clkman_ioh)
112 .global _C_LABEL(pxa2x0_memctl_ioh)
115 .word _C_LABEL(vector_page)
116 .Lxscale_cache_clean_addr:
117 .word _C_LABEL(xscale_cache_clean_addr)
/* Bus-space handles for the GPIO, clock-manager and memory controller. */
119 .Lgpioiohp: .word _C_LABEL(pxa2x0_gpio_ioh)
120 .Lclkmaniohp: .word _C_LABEL(pxa2x0_clkman_ioh)
121 .Lmemctliohp: .word _C_LABEL(pxa2x0_memctl_ioh)
/* VA of the register save area, plus its PA for use while the MMU is
 * off during resume (hard-coded VA->PA translation, see XXX). */
123 .Lsleepdata: .word sleepdata
124 .Lsleepdata_phys: .word sleepdata - 0xc0200000 + 0xa0200000 /* XXX */
125 .Lsleepdata_svc: .word sleepdata_svc
/* Clock and memory controller settings: fast configuration ... */
127 .Lcccr_high: .word CCCR_A | CCCR_TURBO_X2 | CCCR_RUN_X16
128 .Lmdrefr_high: .word MDREFR_HIGH
129 .Lmsc0_high: .word MSC0_HIGH
130 .Lmsc1_high: .word MSC1_HIGH
131 .Lmsc2_high: .word MSC2_HIGH
/* ... and slow configuration. */
132 .Lmdrefr_low: .word MDREFR_LOW
133 .Lmsc0_low: .word MSC0_LOW
134 .Lmsc1_low: .word MSC1_LOW
135 .Lmsc2_low: .word MSC2_LOW
138 * void pxa2x0_cpu_suspend(void)
140 * Enter sleep mode without automatic voltage change. The core must
141 * be in low power mode, and interrupts disabled.
/*
 * NOTE(review): the embedded original line numbers skip throughout this
 * routine; instructions in the gaps (the mask setup before each "Clear
 * undefined bits", the store of the resume VA, the cache-clean loop
 * label/branch, the self-refresh store and the spin after sleep entry)
 * are not visible in this view.
 */
143 ENTRY(pxa2x0_cpu_suspend)
144 stmdb sp!, {r0-r12, lr}
/* r3 walks the sleepdata save area. The resume VA in r2 is presumably
 * stored to the first slot ("=pxa2x0_cpu_resume_virt" in the data area
 * below) -- the store itself is in a missing line; confirm. */
146 ldr r3, .Lsleepdata /* Point to the data area. */
147 ldr r2, =pxa2x0_cpu_resume_virt
/* Save the CP15 state that must survive sleep: control register,
 * translation table base, domain access control. */
150 mrc p15, 0, r2, c1, c0, 0 /* Load MMU control register. */
152 orr r0, r0, #0x00ff0000
153 bic r2, r2, r0 /* Clear undefined bits. */
154 str r2, [r3], #4 /* Save MMU control register. */
156 mrc p15, 0, r2, c2, c0, 0 /* Load TTB address. */
158 orr r0, r0, #0x000000ff
159 bic r2, r2, r0 /* Clear undefined bits. */
160 str r2, [r3], #4 /* Save TTB address. */
162 mrc p15, 0, r2, c3, c0, 0 /* Load domain access control. */
163 str r2, [r3], #4 /* Save domain access control. */
165 mrs r2, spsr /* Load SVC saved CPSR. */
166 str r2, [r3], #4 /* Save SVC saved CPSR. */
167 str sp, [r3], #4 /* Save SVC stack pointer. */
/* Visit each banked processor mode in turn (with IRQ/FIQ masked) and
 * save its SPSR and banked registers into the save area. The slot
 * order here must match the sleepdata layout below. */
169 mov r1, #(PSR_FIQ32_MODE | I32_bit | F32_bit)
170 msr cpsr, r1 /* Enter FIQ mode. */
171 mrs r2, spsr /* Load FIQ mode saved CPSR. */
172 stmia r3!, {r2, r8-r12, sp, lr} /* Save FIQ mode registers. */
174 mov r1, #(PSR_IRQ32_MODE | I32_bit | F32_bit)
175 msr cpsr, r1 /* Enter IRQ mode. */
176 mrs r0, spsr /* Load IRQ mode saved CPSR. */
177 stmia r3!, {r0, sp, lr} /* Save IRQ mode registers. */
179 mov r1, #(PSR_ABT32_MODE | I32_bit | F32_bit)
180 msr cpsr, r1 /* Enter ABT mode. */
181 mrs r0, spsr /* Load ABT mode saved CPSR. */
182 stmia r3!, {r0, sp, lr} /* Save ABT mode registers. */
184 mov r1, #(PSR_UND32_MODE | I32_bit | F32_bit)
185 msr cpsr, r1 /* Enter UND mode. */
186 mrs r0, spsr /* Load UND mode saved CPSR. */
187 stmia r3!, {r0, sp, lr} /* Save UND mode registers. */
189 mov r1, #(PSR_SYS32_MODE | I32_bit | F32_bit)
190 msr cpsr, r1 /* Enter SYS mode. */
191 stmia r3!, {sp, lr} /* Save SYS mode registers. */
193 mov r1, #(PSR_SVC32_MODE | I32_bit | F32_bit)
194 msr cpsr, r1 /* Return to SVC mode. */
196 /* At this point all critical registers have been saved. */
199 mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */
/* Clean the whole data cache, one line at a time, through the
 * dedicated cache-clean address region (r1 = line count). */
201 mov r1, #DCACHE_CACHELINECOUNT
202 ldr r2, .Lxscale_cache_clean_addr
205 * For an explanation of the following two instructions, refer
206 * to the ``BUG ALERT'' section of the XSCALE_CACHE_CLEAN_PROLOGUE
207 * macro in arch/arm/arm/cpufunc_asm_xscale.S.
209 eor r0, r0, #(DCACHE_SIZE)
/* NOTE(review): the loop label and the decrement/branch that iterate
 * the next few instructions are in missing lines. */
214 orr r2, r2, #(I32_bit|F32_bit)
215 msr cpsr_c, r2 /* disable IRQ/FIQ */
217 mcr p15, 0, r0, c7, c2, 5 /* allocate cache line */
218 mcr p15, 0, r0, c7, c6, 1 /* flush D cache single entry */
221 and r2, r2, #~(I32_bit|F32_bit)
222 msr cpsr_c, r2 /* enable IRQ/FIQ */
224 add r0, r0, #CACHELINESIZE
229 mcr p15, 0, r0, c7, c10, 4 /* drain write buffer */
245 /* Prepare to enter sleep mode. */
246 mov r1, #PWRMODE_SLEEP
248 /* Prepare to put SDRAM into self-refresh mode. */
/* r4 = &MDREFR, r5 = MDREFR value with SLFRSH set; the base-register
 * loads are in missing lines. */
251 add r4, r4, #MEMCTL_MDREFR
253 orr r5, r5, #MDREFR_SLFRSH
255 /* XXX prepare pointer to physical address 0, but for whom? */
256 ldr r2, .Lvector_page
259 * Execute the rest of this routine from cache. The needed values
260 * are now in registers.
263 /* XXX tell as(1) to dump the literal pool here, but why? */
268 /* Put SDRAM into self-refresh mode manually. */
273 * Enter sleep mode. Exit from sleep mode returns the processor
274 * to normal run mode. Execution resumes at the physical address
275 * stored in the PSPR after the required boot sequence (a short
276 * excursion into the ROM boot loader).
/* Write PWRMODE_SLEEP (r1) to the CP14 power-mode register (cr7). */
278 mcr p14, 0, r1, c7, c0, 0
280 /* Just in case that wake-up does not resume at */
288 * void pxa2x0_cpu_resume(void)
/*
 * Wake-up entry point. Per the comment in pxa2x0_cpu_suspend, control
 * arrives here (via the ROM boot loader and the PSPR) with execution
 * at a physical address, so this code runs before the MMU is restored
 * and must not touch virtual addresses until the final jump.
 */
291 ENTRY(pxa2x0_cpu_resume)
292 /* XXX C3000-specific */
/* Rewrite the MDREFR DRI (refresh interval) field to the 91MHz value;
 * the load/store of the register around these bit operations are in
 * missing lines. */
293 ldr r0, .Lmdrefr_addr_phys
298 bic r2, r2, #MDREFR_DRI & 0x000000ff
299 bic r2, r2, #MDREFR_DRI & 0x0000ff00
300 orr r2, r2, #MDREFR_DRI_91MHZ
/* Reload the saved CP15 state from the save area (the loads that fill
 * r7-r10 are in missing lines -- presumably an ldm from [r0]) and
 * re-enable the MMU. */
305 ldr r0, .Lsleepdata_phys /* Point to PA of saved data. */
308 mcr p15, 0, r10, c3, c0, 0 /* Restore domain access control. */
309 mcr p15, 0, r9, c2, c0, 0 /* Restore TTB address. */
310 mcr p15, 0, r0, c8, c7, 0 /* Flush I+D TLBs. */
311 mcr p15, 0, r0, c7, c7, 0 /* Flush I+D BTB. */
312 mcr p15, 0, r8, c1, c0, 0 /* Restore MMU control. */
313 mov pc, r7 /* Jump to virtual address. */
323 pxa2x0_cpu_resume_virt:
/*
 * Now running at the kernel virtual address again. Undo the per-mode
 * register saving done by pxa2x0_cpu_suspend, then return to its
 * caller via the registers pushed on the SVC stack.
 * NOTE(review): the ldm/msr sequences that actually reload each
 * mode's SPSR/sp/lr are in lines missing from this view; only the
 * mode-switch immediates and section comments are visible.
 */
324 ldr r2, .Lsleepdata_svc /* Load VA of saved registers. */
326 /* Restore SVC mode SPSR and stack pointer. */
331 /* Restore FIQ mode registers. */
332 mov r1, #(PSR_FIQ32_MODE | I32_bit | F32_bit)
344 /* Restore IRQ mode registers. */
345 mov r1, #(PSR_IRQ32_MODE | I32_bit | F32_bit)
352 /* Restore ABT mode registers. */
353 mov r1, #(PSR_ABT32_MODE | I32_bit | F32_bit)
360 /* Restore UND mode registers. */
361 mov r1, #(PSR_UND32_MODE | I32_bit | F32_bit)
368 /* Restore SYS mode registers. */
369 mov r1, #(PSR_SYS32_MODE | I32_bit | F32_bit)
374 /* Return to SVC mode. */
375 mov r1, #(PSR_SVC32_MODE | I32_bit | F32_bit)
/* Pop the registers pushed at the top of pxa2x0_cpu_suspend and
 * return to its caller. */
378 ldmia sp!, {r0-r12, pc}
/* PA of the MDREFR register. NOTE(review): its label (referenced as
 * .Lmdrefr_addr_phys in pxa2x0_cpu_resume) is in a missing line. */
381 .word PXA2X0_MEMCTL_BASE + MEMCTL_MDREFR
386 * Saved processor state
/*
 * Register save area written by pxa2x0_cpu_suspend and read back on
 * resume. The slot order must match the store order in
 * pxa2x0_cpu_suspend exactly.
 * NOTE(review): the "sleepdata" and "sleepdata_svc" labels referenced
 * via .Lsleepdata/.Lsleepdata_svc are in missing lines; sleepdata_svc
 * presumably marks the "SVC mode saved CPSR" slot -- confirm.
 */
390 .word 0 /* =pxa2x0_cpu_resume_virt */
391 .word 0 /* MMU control */
392 .word 0 /* MMU TTB address */
393 .word 0 /* MMU domain access control */
395 .word 0 /* SVC mode saved CPSR */
396 .word 0 /* SVC mode stack pointer */
397 .word 0 /* FIQ mode saved CPSR */
398 .word 0 /* FIQ mode r8 */
399 .word 0 /* FIQ mode r9 */
400 .word 0 /* FIQ mode r10 */
401 .word 0 /* FIQ mode r11 */
402 .word 0 /* FIQ mode r12 */
403 .word 0 /* FIQ mode stack pointer */
404 .word 0 /* FIQ mode link register */
405 .word 0 /* IRQ mode saved CPSR */
406 .word 0 /* IRQ mode stack pointer */
407 .word 0 /* IRQ mode link register */
408 .word 0 /* ABT mode saved CPSR */
409 .word 0 /* ABT mode stack pointer */
410 .word 0 /* ABT mode link register */
411 .word 0 /* UND mode saved CPSR */
412 .word 0 /* UND mode stack pointer */
413 .word 0 /* UND mode link register */
414 .word 0 /* SYS mode stack pointer */
415 .word 0 /* SYS mode link register */
420 * void pxa27x_run_mode(void)
422 * Disable half-turbo and turbo mode, but keep fast-bus mode.
423 * Memory and LCD clock is not changed, so no reconfiguration is
426 ENTRY(pxa27x_run_mode)
/* Read-modify-write CLKCFG (CP14 cr6): clear the HT, F and T bits,
 * leaving B (fast-bus) untouched. */
428 mrc p14, 0, r0, c6, c0, 0
429 and r0, r0, #~(CLKCFG_HT | CLKCFG_F| CLKCFG_T)
430 mcr p14, 0, r0, c6, c0, 0
/* NOTE(review): the return instruction is in a missing line. */
435 * void pxa27x_fastbus_run_mode(int enable, u_int32_t mdrefr)
437 * Enter normal run mode with fast-bus mode enabled or disabled.
438 * The new value of MDREFR is programmed before or after CLKCFG,
442 ENTRY(pxa27x_fastbus_run_mode)
443 stmdb sp!, {r0-r2, lr}
/* NOTE(review): the test on "enable" that selects between the two
 * paths below, the CLKCFG value setup in r0, and the load of the
 * memory-controller handle into r2 are in missing lines. */
451 /* Enter normal run mode with fast-bus mode enabled. */
453 mcr p14, 0, r0, c6, c0, 0
454 /* Set the new SDRAM refresh rate. */
/* The read-back forces the MDREFR store to complete before the next
 * SDRAM access (same I/O-ordering rule cited in
 * pxa27x_frequency_change below). */
455 str r1, [r2, #MEMCTL_MDREFR]
456 ldr r0, [r2, #MEMCTL_MDREFR]
458 ldmia sp!, {r0-r2, pc}
461 /* Set the new SDRAM refresh rate. */
462 str r1, [r2, #MEMCTL_MDREFR]
463 ldr r0, [r2, #MEMCTL_MDREFR]
465 /* Enter normal run mode with fast-bus mode disabled. */
467 mcr p14, 0, r0, c6, c0, 0
468 ldmia sp!, {r0-r2, pc}
470 /* Keep these offsets in sync with struct memcfg. */
471 #define memcfg_mdrefr_high 0x00
472 #define memcfg_mdrefr_low 0x04
473 #define memcfg_mdrefr_low2 0x08 /* unused */
/* msc_high (0x0c) and msc_low (0x18) each span 12 bytes: room for
 * three 32-bit values (MSC0..MSC2) per speed setting. */
474 #define memcfg_msc_high 0x0c
475 #define memcfg_msc_low 0x18
476 #define memcfg_mdrefr_91 0x24
479 * void pxa27x_frequency_change(int cccr, int clkcfg,
480 * struct pxa2x0_memcfg *memcfg)
482 * Change the core PLL frequency and SDRAM refresh rate, ensuring the
483 * proper sequence of operations. If the CCCR_A bit is clear and L
484 * is not equal to 7 the result is undefined.
487 ENTRY(pxa27x_frequency_change)
488 stmdb sp!, {r0-r5, lr}
490 /* Always write to CCCR before a frequency change. */
/* NOTE(review): the load of the clock-manager handle into r3 is in a
 * missing line. */
493 str r0, [r3, #CLKMAN_CCCR]
495 /* Load the needed values into registers to avoid SDRAM access. */
496 and r3, r0, #CCCR_L_MASK
/* Dispatch on the requested speed: L=7 selects the 91MHz path,
 * fast-bus (CLKCFG_B) set selects the 208MHz path, otherwise fall
 * through to the "high" path. */
499 cmp r3, #CCCR_RUN_X7 /* L=7 is 91MHz mode */
500 beq frequency_change_91
501 and r3, r1, #CLKCFG_B
503 bne frequency_change_208
505 frequency_change_high:
/* r3 = MDREFR used during the transition, r4 = final MDREFR,
 * r2 -> the MSC values for the target speed (see
 * frequency_change_on_cache for how they are consumed). */
506 ldr r3, [r2, #memcfg_mdrefr_low]
507 ldr r4, [r2, #memcfg_mdrefr_high]
508 add r2, r2, #memcfg_msc_high
509 bl frequency_change_on_cache /* XXX why BL? */
510 frequency_change_208:
511 ldr r3, [r2, #memcfg_mdrefr_low]
512 ldr r4, [r2, #memcfg_mdrefr_low]
513 add r2, r2, #memcfg_msc_high
514 bl frequency_change_on_cache
/* NOTE(review): the frequency_change_91 label targeted by the beq
 * above is in a missing line just before here. */
516 ldr r3, [r2, #memcfg_mdrefr_low]
517 ldr r4, [r2, #memcfg_mdrefr_91]
518 add r2, r2, #memcfg_msc_low
519 bl frequency_change_on_cache
521 /* Align execution to a cache line. */
523 frequency_change_on_cache:
524 /* Change to a low SDRAM refresh rate. Wait until the store to
525 * MDREFR is complete, following section 2.4 I/O Ordering and
526 * 6.5.1.4 of the PXA27x Developer's Manual. */
/* NOTE(review): the load of the memory-controller handle into r0 is
 * in a missing line. */
527 str r3, [r0, #MEMCTL_MDREFR]
528 ldr r5, [r0, #MEMCTL_MDREFR]
530 /* Program new CLKCFG value, starting a core PLL frequency change
531 * if CLKCFG_F is set. */
532 mcr p14, 0, r1, c6, c0, 0
533 /* Change SDRAM clock frequency to 104MHz, and ensure that the
534 * store to MDREFR is complete before the next SDRAM access. */
535 str r4, [r0, #MEMCTL_MDREFR]
536 ldr r5, [r0, #MEMCTL_MDREFR]
538 /* Configure synchronous, static, and VLIO interfaces. */
/* NOTE(review): the loads that fetch each MSC value into r1 (from the
 * array at [r2]) are in missing lines between these stores. */
540 str r1, [r0, #MEMCTL_MSC0]
542 str r1, [r0, #MEMCTL_MSC1]
544 str r1, [r0, #MEMCTL_MSC2]
545 ldmia sp!, {r0-r5, pc}
548 * void pxa27x_cpu_speed_91(void)
550 * Switch core run frequency to 91 MHz.
553 ENTRY(pxa27x_cpu_speed_91)
554 stmdb sp!, {r0-r3, lr}
/* NOTE(review): several setup loads are in missing lines: the
 * clock-manager handle into r0, the .Lcccr_91 value into r1, then the
 * memory-controller handle and the .Lmdrefr_91 / transition MDREFR
 * values -- confirm register roles against the full source. */
559 str r1, [r0, #CLKMAN_CCCR]
/* Drop to a safe refresh rate; the read-back waits for the store to
 * complete (same I/O-ordering rule as in pxa27x_frequency_change). */
569 str r3, [r0, #MEMCTL_MDREFR]
570 ldr r3, [r0, #MEMCTL_MDREFR]
/* Start the frequency change via CLKCFG (CP14 cr6), then program the
 * 91MHz refresh rate. */
573 mcr p14, 0, r1, c6, c0, 0
574 str r2, [r0, #MEMCTL_MDREFR]
575 ldr r2, [r0, #MEMCTL_MDREFR]
/* NOTE(review): the loads of the MSC values into r1 are in missing
 * lines between these stores. */
578 str r1, [r0, #MEMCTL_MSC0]
580 str r1, [r0, #MEMCTL_MSC1]
582 str r1, [r0, #MEMCTL_MSC2]
584 ldmia sp!, {r0-r3, pc}
/* Literal pool for this routine: 91MHz clock and refresh settings. */
586 .Lcccr_91: .word CCCR_TURBO_X1 | CCCR_RUN_X7
587 .Lmdrefr_91: .word MDREFR_SPEED_91