/*	$OpenBSD: pxa2x0_apm_asm.S,v 1.4 2007/11/02 05:18:25 miod Exp $	*/

/*
 * Copyright (c) 2005 Uwe Stuehler <uwe@openbsd.org>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include <machine/asm.h>
#include <machine/cpu.h>

#include <arch/arm/xscale/pxa2x0reg.h>
#include <arch/arm/sa11x0/sa11x0_reg.h>

/* XXX replace with values defined elsewhere. */
#define DCACHE_CACHELINECOUNT	1024
#define CACHELINESIZE		32
#define DCACHE_SIZE		(CACHELINESIZE * DCACHE_CACHELINECOUNT)
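
/*
 * With these values the cache-clean loop in pxa2x0_cpu_suspend below
 * walks DCACHE_CACHELINECOUNT (1024) lines of CACHELINESIZE (32) bytes,
 * i.e. DCACHE_SIZE = 32 KB, the size of the XScale data cache.
 */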

/* cp14 register 6 */
#define CLKCFG_T		(1<<0)	/* turbo */
#define CLKCFG_F		(1<<1)	/* frequency change */
#define CLKCFG_HT		(1<<2)	/* half-turbo */
#define CLKCFG_B		(1<<3)	/* fast-bus */

/* cp14 register 7 */
#define PWRMODE_NORMAL		(0<<0)
#define PWRMODE_IDLE		(1<<0)
#define PWRMODE_STANDBY		(2<<0)
#define PWRMODE_SLEEP		(3<<0)
#define PWRMODE_DEEP_SLEEP	(7<<0)

/* XXX */
#define MDREFR_C3000		(MDREFR_K0DB2|MDREFR_E1PIN|MDREFR_K1RUN|\
				 MDREFR_K1DB2|MDREFR_K2DB2|MDREFR_APD)
#define MDREFR_DRI_91MHZ	(0x13<<0)
#define MDREFR_HIGH		(MDREFR_C3000 | 0x030)
#define MDREFR_LOW		(MDREFR_C3000 | 0x00b)
#define MDREFR_SPEED_91		(MDREFR_C3000 | MDREFR_DRI_91MHZ)
#define MDREFR_SPEED_LOW	(MDREFR_C3000 | 0x017)
#define MSC0_HIGH \
	( 7 << MSC_RRR_SHIFT << 16) | \
	(15 << MSC_RDN_SHIFT << 16) | \
	(15 << MSC_RDF_SHIFT << 16) | \
	(MSC_RT_NONBURST << 16) | \
	( 2 << MSC_RRR_SHIFT) | \
	(13 << MSC_RDN_SHIFT) | \
	(13 << MSC_RDF_SHIFT) | \
	MSC_RBW /* PXA271 */ | \
	MSC_RT_NONBURST
#define MSC1_HIGH \
	( 7 << MSC_RRR_SHIFT << 16) | \
	(15 << MSC_RDN_SHIFT << 16) | \
	(15 << MSC_RDF_SHIFT << 16) | \
	(MSC_RT_VLIO << 16) | \
	( 3 << MSC_RRR_SHIFT) | \
	( 4 << MSC_RDN_SHIFT) | \
	(13 << MSC_RDF_SHIFT) | \
	MSC_RT_VLIO
#define MSC2_HIGH \
	( 7 << MSC_RRR_SHIFT << 16) | \
	(15 << MSC_RDN_SHIFT << 16) | \
	(15 << MSC_RDF_SHIFT << 16) | \
	(MSC_RT_NONBURST << 16) | \
	( 3 << MSC_RRR_SHIFT) | \
	( 4 << MSC_RDN_SHIFT) | \
	(13 << MSC_RDF_SHIFT) | \
	MSC_RT_VLIO
#define MSC0_LOW \
	( 7 << MSC_RRR_SHIFT << 16) | \
	(15 << MSC_RDN_SHIFT << 16) | \
	(15 << MSC_RDF_SHIFT << 16) | \
	(MSC_RT_NONBURST << 16) | \
	( 1 << MSC_RRR_SHIFT) | \
	( 8 << MSC_RDN_SHIFT) | \
	( 8 << MSC_RDF_SHIFT) | \
	MSC_RBW /* PXA271 */ | \
	MSC_RT_NONBURST
#define MSC1_LOW \
	( 7 << MSC_RRR_SHIFT << 16) | \
	(15 << MSC_RDN_SHIFT << 16) | \
	(15 << MSC_RDF_SHIFT << 16) | \
	(MSC_RT_VLIO << 16) | \
	( 1 << MSC_RRR_SHIFT) | \
	( 2 << MSC_RDN_SHIFT) | \
	( 6 << MSC_RDF_SHIFT) | \
	MSC_RT_VLIO
#define MSC2_LOW \
	( 7 << MSC_RRR_SHIFT << 16) | \
	(15 << MSC_RDN_SHIFT << 16) | \
	(15 << MSC_RDF_SHIFT << 16) | \
	(MSC_RT_NONBURST << 16) | \
	( 1 << MSC_RRR_SHIFT) | \
	( 2 << MSC_RDN_SHIFT) | \
	( 6 << MSC_RDF_SHIFT) | \
	MSC_RT_VLIO
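
/*
 * Each MSCx value above packs two static memory bank configurations:
 * the low halfword configures the even chip select and the high
 * halfword (the "<< 16" terms) the odd chip select of the pair.  The
 * _LOW variants are the relaxed timings used for the 91 MHz mode (see
 * pxa27x_cpu_speed_91 and pxa27x_frequency_change below).
 */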

	.text
	.global	_C_LABEL(vector_page)
	.global	_C_LABEL(xscale_cache_clean_addr)
	.global	_C_LABEL(pxa2x0_clkman_ioh)
	.global	_C_LABEL(pxa2x0_memctl_ioh)

.Lvector_page:
	.word	_C_LABEL(vector_page)
.Lxscale_cache_clean_addr:
	.word	_C_LABEL(xscale_cache_clean_addr)

.Lgpioiohp:	.word	_C_LABEL(pxa2x0_gpio_ioh)
.Lclkmaniohp:	.word	_C_LABEL(pxa2x0_clkman_ioh)
.Lmemctliohp:	.word	_C_LABEL(pxa2x0_memctl_ioh)

.Lsleepdata:	.word	sleepdata
.Lsleepdata_phys: .word	sleepdata - 0xc0200000 + 0xa0200000 /* XXX */
.Lsleepdata_svc: .word	sleepdata_svc

.Lcccr_high:	.word	CCCR_A | CCCR_TURBO_X2 | CCCR_RUN_X16
.Lmdrefr_high:	.word	MDREFR_HIGH
.Lmsc0_high:	.word	MSC0_HIGH
.Lmsc1_high:	.word	MSC1_HIGH
.Lmsc2_high:	.word	MSC2_HIGH
.Lmdrefr_low:	.word	MDREFR_LOW
.Lmsc0_low:	.word	MSC0_LOW
.Lmsc1_low:	.word	MSC1_LOW
.Lmsc2_low:	.word	MSC2_LOW

/*
 * void pxa2x0_cpu_suspend(void)
 *
 * Enter sleep mode without automatic voltage change.  The core must
 * be in low-power mode, and interrupts must be disabled.
 */
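/*
 * Hypothetical caller sketch (the helper names are assumptions, not
 * code from this file): the APM driver is expected to store the
 * physical address of pxa2x0_cpu_resume in the PSPR before calling
 * this routine, so that the boot ROM can branch there after wake-up
 * (see the comment above the sleep-mode mcr below).
 *
 *	extern void pxa2x0_cpu_suspend(void);
 *	extern void pxa2x0_cpu_resume(void);
 *
 *	pspr_write(vtophys((vaddr_t)pxa2x0_cpu_resume)); // assumed helpers
 *	pxa2x0_cpu_suspend();		// returns here after wake-up
 */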
ENTRY(pxa2x0_cpu_suspend)
	stmdb	sp!, {r0-r12, lr}

	ldr	r3, .Lsleepdata		/* Point to the data area. */
	ldr	r2, =pxa2x0_cpu_resume_virt
	str	r2, [r3], #4

	mrc	p15, 0, r2, c1, c0, 0	/* Load MMU control register. */
	mov	r0, #0xff000000
	orr	r0, r0, #0x00ff0000
	bic	r2, r2, r0		/* Clear undefined bits. */
	str	r2, [r3], #4		/* Save MMU control register. */

	mrc	p15, 0, r2, c2, c0, 0	/* Load TTB address. */
	mov	r0, #0x00003f00
	orr	r0, r0, #0x000000ff
	bic	r2, r2, r0		/* Clear undefined bits. */
	str	r2, [r3], #4		/* Save TTB address. */

	mrc	p15, 0, r2, c3, c0, 0	/* Load domain access control. */
	str	r2, [r3], #4		/* Save domain access control. */

	mrs	r2, spsr		/* Load SVC saved CPSR. */
	str	r2, [r3], #4		/* Save SVC saved CPSR. */
	str	sp, [r3], #4		/* Save SVC stack pointer. */

	mov	r1, #(PSR_FIQ32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter FIQ mode. */
	mrs	r2, spsr		/* Load FIQ mode saved CPSR. */
	stmia	r3!, {r2, r8-r12, sp, lr} /* Save FIQ mode registers. */

	mov	r1, #(PSR_IRQ32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter IRQ mode. */
	mrs	r0, spsr		/* Load IRQ mode saved CPSR. */
	stmia	r3!, {r0, sp, lr}	/* Save IRQ mode registers. */

	mov	r1, #(PSR_ABT32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter ABT mode. */
	mrs	r0, spsr		/* Load ABT mode saved CPSR. */
	stmia	r3!, {r0, sp, lr}	/* Save ABT mode registers. */

	mov	r1, #(PSR_UND32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter UND mode. */
	mrs	r0, spsr		/* Load UND mode saved CPSR. */
	stmia	r3!, {r0, sp, lr}	/* Save UND mode registers. */

	mov	r1, #(PSR_SYS32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter SYS mode. */
	stmia	r3!, {sp, lr}		/* Save SYS mode registers. */

	mov	r1, #(PSR_SVC32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Return to SVC mode. */

	/* At this point all critical registers have been saved. */

	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */

	mov	r1, #DCACHE_CACHELINECOUNT
	ldr	r2, .Lxscale_cache_clean_addr
	ldr	r0, [r2]
	/*
	 * For an explanation of the following two instructions, refer
	 * to the ``BUG ALERT'' section of the XSCALE_CACHE_CLEAN_PROLOGUE
	 * macro in arch/arm/arm/cpufunc_asm_xscale.S.
	 */
	eor	r0, r0, #(DCACHE_SIZE)
	str	r0, [r2]

cache_flush_loop:
	mrs	r2, cpsr
	orr	r2, r2, #(I32_bit|F32_bit)
	msr	cpsr_c, r2		/* disable IRQ/FIQ */

	mcr	p15, 0, r0, c7, c2, 5	/* allocate cache line */
	mcr	p15, 0, r0, c7, c6, 1	/* flush D cache single entry */

	mrs	r2, cpsr
	and	r2, r2, #~(I32_bit|F32_bit)
	msr	cpsr_c, r2		/* enable IRQ/FIQ */

	add	r0, r0, #CACHELINESIZE
	subs	r1, r1, #1
	bne	cache_flush_loop

	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */

	b	1f
1:
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* Prepare to enter sleep mode. */
	mov	r1, #PWRMODE_SLEEP

	/* Prepare to put SDRAM into self-refresh mode. */
	ldr	r4, .Lmemctliohp
	ldr	r4, [r4]
	add	r4, r4, #MEMCTL_MDREFR
	ldr	r5, [r4]
	orr	r5, r5, #MDREFR_SLFRSH

	/* XXX prepare pointer to physical address 0, but for whom? */
	ldr	r2, .Lvector_page

	/*
	 * Execute the rest of this routine from cache.  The needed values
	 * are now in registers.
	 */
	b	1f
	/* XXX tell as(1) to dump the literal pool here, but why? */
	.ltorg
	.align	5
1:

	/* Put SDRAM into self-refresh mode manually. */
	str	r5, [r4]
	nop

	/*
	 * Enter sleep mode.  Exit from sleep mode returns the processor
	 * to normal run mode.  Execution resumes at the physical address
	 * stored in the PSPR after the required boot sequence (a short
	 * excursion into the ROM boot loader).
	 */
	mcr	p14, 0, r1, c7, c0, 0

	/* Just in case wake-up does not resume at the expected address,
	 * spin here. */
	nop
	nop
	nop
1:
	b	1b

/*
 * void pxa2x0_cpu_resume(void)
 */
	.align	5
ENTRY(pxa2x0_cpu_resume)
	/* XXX C3000-specific */
	ldr	r0, .Lmdrefr_addr_phys
	b	1f
	.align	5
1:
	ldr	r2, [r0]
	bic	r2, r2, #MDREFR_DRI & 0x000000ff
	bic	r2, r2, #MDREFR_DRI & 0x0000ff00
	orr	r2, r2, #MDREFR_DRI_91MHZ
	str	r2, [r0]
	b	1f
	.align	5
1:
	ldr	r0, .Lsleepdata_phys	/* Point to PA of saved data. */

	ldmia	r0!, {r7-r10}
	mcr	p15, 0, r10, c3, c0, 0	/* Restore domain access control. */
	mcr	p15, 0, r9, c2, c0, 0	/* Restore TTB address. */
	mcr	p15, 0, r0, c8, c7, 0	/* Flush I+D TLBs. */
	mcr	p15, 0, r0, c7, c7, 0	/* Flush I+D BTB. */
	mcr	p15, 0, r8, c1, c0, 0	/* Restore MMU control. */
	mov	pc, r7			/* Jump to virtual address. */
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

pxa2x0_cpu_resume_virt:
	ldr	r2, .Lsleepdata_svc	/* Load VA of saved registers. */

	/* Restore SVC mode SPSR and stack pointer. */
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	sp, [r2], #4

	/* Restore FIQ mode registers. */
	mov	r1, #(PSR_FIQ32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	r8, [r2], #4
	ldr	r9, [r2], #4
	ldr	r10, [r2], #4
	ldr	r11, [r2], #4
	ldr	r12, [r2], #4
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Restore IRQ mode registers. */
	mov	r1, #(PSR_IRQ32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Restore ABT mode registers. */
	mov	r1, #(PSR_ABT32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Restore UND mode registers. */
	mov	r1, #(PSR_UND32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Restore SYS mode registers. */
	mov	r1, #(PSR_SYS32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Return to SVC mode. */
	mov	r1, #(PSR_SVC32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1

	ldmia	sp!, {r0-r12, pc}

.Lmdrefr_addr_phys:
	.word	PXA2X0_MEMCTL_BASE + MEMCTL_MDREFR

	.data

/*
 * Saved processor state
 */
	.align	5
sleepdata:
	.word	0		/* =pxa2x0_cpu_resume_virt */
	.word	0		/* MMU control */
	.word	0		/* MMU TTB address */
	.word	0		/* MMU domain access control */
sleepdata_svc:
	.word	0		/* SVC mode saved CPSR */
	.word	0		/* SVC mode stack pointer */
	.word	0		/* FIQ mode saved CPSR */
	.word	0		/* FIQ mode r8 */
	.word	0		/* FIQ mode r9 */
	.word	0		/* FIQ mode r10 */
	.word	0		/* FIQ mode r11 */
	.word	0		/* FIQ mode r12 */
	.word	0		/* FIQ mode stack pointer */
	.word	0		/* FIQ mode link register */
	.word	0		/* IRQ mode saved CPSR */
	.word	0		/* IRQ mode stack pointer */
	.word	0		/* IRQ mode link register */
	.word	0		/* ABT mode saved CPSR */
	.word	0		/* ABT mode stack pointer */
	.word	0		/* ABT mode link register */
	.word	0		/* UND mode saved CPSR */
	.word	0		/* UND mode stack pointer */
	.word	0		/* UND mode link register */
	.word	0		/* SYS mode stack pointer */
	.word	0		/* SYS mode link register */
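
/*
 * Note: the order and number of words above must match exactly what
 * pxa2x0_cpu_suspend stores (resume address, MMU state, then the
 * per-mode CPSR/sp/lr sets) and what pxa2x0_cpu_resume_virt reloads.
 */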

	.text

/*
 * void pxa27x_run_mode(void)
 *
 * Disable half-turbo and turbo mode, but keep fast-bus mode.
 * Memory and LCD clocks are not changed, so no reconfiguration is
 * necessary.
 */
ENTRY(pxa27x_run_mode)
	stmdb	sp!, {r0}
	mrc	p14, 0, r0, c6, c0, 0
	and	r0, r0, #~(CLKCFG_HT | CLKCFG_F | CLKCFG_T)
	mcr	p14, 0, r0, c6, c0, 0
	ldmia	sp!, {r0}
	mov	pc, lr

/*
 * void pxa27x_fastbus_run_mode(int enable, uint32_t mdrefr)
 *
 * Enter normal run mode with fast-bus mode enabled or disabled.
 * The new value of MDREFR is programmed before or after CLKCFG,
 * as appropriate.
 */
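/*
 * Rough C equivalent of the two paths below (a sketch only; clkcfg_write
 * stands in for the cp14 CLKCFG access and is not a real function here):
 *
 *	if (enable) {
 *		clkcfg_write(CLKCFG_B);		// switch to fast-bus first
 *		MDREFR = mdrefr;		// then the new refresh value
 *	} else {
 *		MDREFR = mdrefr;		// new refresh value first
 *		clkcfg_write(0);		// then leave fast-bus mode
 *	}
 */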
	.align	5
ENTRY(pxa27x_fastbus_run_mode)
	stmdb	sp!, {r0-r2, lr}
	ldr	r2, .Lmemctliohp
	ldr	r2, [r2]
	cmp	r0, #0
	beq	disable_fastbus
	b	enable_fastbus
	.align	5
enable_fastbus:
	/* Enter normal run mode with fast-bus mode enabled. */
	mov	r0, #CLKCFG_B
	mcr	p14, 0, r0, c6, c0, 0
	/* Set the new SDRAM refresh rate. */
	str	r1, [r2, #MEMCTL_MDREFR]
	ldr	r0, [r2, #MEMCTL_MDREFR]
	mov	r0, r0
	ldmia	sp!, {r0-r2, pc}
	.align	5
disable_fastbus:
	/* Set the new SDRAM refresh rate. */
	str	r1, [r2, #MEMCTL_MDREFR]
	ldr	r0, [r2, #MEMCTL_MDREFR]
	mov	r0, r0
	/* Enter normal run mode with fast-bus mode disabled. */
	mov	r0, #0x0
	mcr	p14, 0, r0, c6, c0, 0
	ldmia	sp!, {r0-r2, pc}

/* Keep these offsets in sync with struct memcfg. */
#define memcfg_mdrefr_high	0x00
#define memcfg_mdrefr_low	0x04
#define memcfg_mdrefr_low2	0x08	/* unused */
#define memcfg_msc_high		0x0c
#define memcfg_msc_low		0x18
#define memcfg_mdrefr_91	0x24
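
/*
 * For reference, these offsets imply a layout along the following lines
 * (a sketch; the authoritative definition is the struct pxa2x0_memcfg
 * referenced by pxa27x_frequency_change below):
 *
 *	struct pxa2x0_memcfg {
 *		uint32_t mdrefr_high;	// 0x00
 *		uint32_t mdrefr_low;	// 0x04
 *		uint32_t mdrefr_low2;	// 0x08, unused
 *		uint32_t msc_high[3];	// 0x0c: MSC0, MSC1, MSC2
 *		uint32_t msc_low[3];	// 0x18: MSC0, MSC1, MSC2
 *		uint32_t mdrefr_91;	// 0x24
 *	};
 */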

/*
 * void pxa27x_frequency_change(int cccr, int clkcfg,
 *     struct pxa2x0_memcfg *memcfg)
 *
 * Change the core PLL frequency and SDRAM refresh rate, ensuring the
 * proper sequence of operations.  If the CCCR_A bit is clear and L
 * is not equal to 7, the result is undefined.
 */
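/*
 * The dispatch below, in rough C form (a sketch of what the assembly
 * does, not a drop-in replacement; "change" stands for the
 * frequency_change_on_cache sequence):
 *
 *	CCCR = cccr;
 *	if ((cccr & CCCR_L_MASK) == CCCR_RUN_X7)	// L=7: 91 MHz mode
 *		change(mdrefr_low, mdrefr_91, msc_low);
 *	else if (clkcfg & CLKCFG_B)			// fast-bus: full speed
 *		change(mdrefr_low, mdrefr_high, msc_high);
 *	else						// no fast-bus (208 MHz)
 *		change(mdrefr_low, mdrefr_low, msc_high);
 */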
	.align	5
ENTRY(pxa27x_frequency_change)
	stmdb	sp!, {r0-r5, lr}

	/* Always write to CCCR before a frequency change. */
	ldr	r3, .Lclkmaniohp
	ldr	r3, [r3]
	str	r0, [r3, #CLKMAN_CCCR]

	/* Load the needed values into registers to avoid SDRAM access. */
	and	r3, r0, #CCCR_L_MASK
	ldr	r0, .Lmemctliohp
	ldr	r0, [r0]
	cmp	r3, #CCCR_RUN_X7	/* L=7 is 91MHz mode */
	beq	frequency_change_91
	and	r3, r1, #CLKCFG_B
	cmp	r3, #CLKCFG_B
	bne	frequency_change_208
	/* FALLTHROUGH */
frequency_change_high:
	ldr	r3, [r2, #memcfg_mdrefr_low]
	ldr	r4, [r2, #memcfg_mdrefr_high]
	add	r2, r2, #memcfg_msc_high
	bl	frequency_change_on_cache /* XXX why BL? */
frequency_change_208:
	ldr	r3, [r2, #memcfg_mdrefr_low]
	ldr	r4, [r2, #memcfg_mdrefr_low]
	add	r2, r2, #memcfg_msc_high
	bl	frequency_change_on_cache
frequency_change_91:
	ldr	r3, [r2, #memcfg_mdrefr_low]
	ldr	r4, [r2, #memcfg_mdrefr_91]
	add	r2, r2, #memcfg_msc_low
	bl	frequency_change_on_cache

	/* Align execution to a cache line. */
	.align	5
frequency_change_on_cache:
	/* Change to a low SDRAM refresh rate.  Wait until the store to
	 * MDREFR is complete, following section 2.4 I/O Ordering and
	 * 6.5.1.4 of the PXA27x Developer's Manual. */
	str	r3, [r0, #MEMCTL_MDREFR]
	ldr	r5, [r0, #MEMCTL_MDREFR]
	mov	r5, r5
	/* Program new CLKCFG value, starting a core PLL frequency change
	 * if CLKCFG_F is set. */
	mcr	p14, 0, r1, c6, c0, 0
	/* Change SDRAM clock frequency to 104MHz, and ensure that the
	 * store to MDREFR is complete before the next SDRAM access. */
	str	r4, [r0, #MEMCTL_MDREFR]
	ldr	r5, [r0, #MEMCTL_MDREFR]
	mov	r5, r5
	/* Configure synchronous, static, and VLIO interfaces. */
	ldr	r1, [r2], #4
	str	r1, [r0, #MEMCTL_MSC0]
	ldr	r1, [r2], #4
	str	r1, [r0, #MEMCTL_MSC1]
	ldr	r1, [r2]
	str	r1, [r0, #MEMCTL_MSC2]
	ldmia	sp!, {r0-r5, pc}

/*
 * void pxa27x_cpu_speed_91(void)
 *
 * Switch core run frequency to 91 MHz.
 */
	.align	5
ENTRY(pxa27x_cpu_speed_91)
	stmdb	sp!, {r0-r3, lr}

	ldr	r0, .Lclkmaniohp
	ldr	r0, [r0]
	ldr	r1, .Lcccr_91
	str	r1, [r0, #CLKMAN_CCCR]

	ldr	r0, .Lmemctliohp
	ldr	r0, [r0]
	ldr	r2, .Lmdrefr_91
	ldr	r3, .Lmdrefr_low

	bl	1f
	.align	5
1:
	str	r3, [r0, #MEMCTL_MDREFR]
	ldr	r3, [r0, #MEMCTL_MDREFR]

	mov	r1, #CLKCFG_F
	mcr	p14, 0, r1, c6, c0, 0
	str	r2, [r0, #MEMCTL_MDREFR]
	ldr	r2, [r0, #MEMCTL_MDREFR]

	ldr	r1, .Lmsc0_low
	str	r1, [r0, #MEMCTL_MSC0]
	ldr	r1, .Lmsc1_low
	str	r1, [r0, #MEMCTL_MSC1]
	ldr	r1, .Lmsc2_low
	str	r1, [r0, #MEMCTL_MSC2]

	ldmia	sp!, {r0-r3, pc}

.Lcccr_91:	.word	CCCR_TURBO_X1 | CCCR_RUN_X7
.Lmdrefr_91:	.word	MDREFR_SPEED_91