/*	$OpenBSD: pxa2x0_apm_asm.S,v 1.4 2007/11/02 05:18:25 miod Exp $	*/
2
3 /*
4 * Copyright (c) 2005 Uwe Stuehler <uwe (at) openbsd.org>
5 *
6 * Permission to use, copy, modify, and distribute this software for any
7 * purpose with or without fee is hereby granted, provided that the above
8 * copyright notice and this permission notice appear in all copies.
9 *
10 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
11 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
12 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
13 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
14 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
15 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
16 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
17 */
18
19 #include <machine/asm.h>
20 #include <machine/cpu.h>
21
22 #include <arch/arm/xscale/pxa2x0reg.h>
23 #include <arch/arm/sa11x0/sa11x0_reg.h>
24
/* XXX replace with values defined elsewhere. */
/*
 * XScale D-cache geometry used by the clean-by-allocate loop in
 * pxa2x0_cpu_suspend: 1024 lines of 32 bytes = 32 KB total.
 */
#define DCACHE_CACHELINECOUNT	1024
#define CACHELINESIZE		32
#define DCACHE_SIZE		(CACHELINESIZE * DCACHE_CACHELINECOUNT)
29
/*
 * cp14 register 6 (CLKCFG) — core clock configuration.  Read/written
 * with mrc/mcr p14, 0, rN, c6, c0, 0 below.
 */
#define CLKCFG_T	(1<<0)	/* turbo */
#define CLKCFG_F	(1<<1)	/* frequency change */
#define CLKCFG_HT	(1<<2)	/* half-turbo */
#define CLKCFG_B	(1<<3)	/* fast-bus */

/*
 * cp14 register 7 (PWRMODE) — writing a non-zero mode with
 * mcr p14, 0, rN, c7, c0, 0 puts the core into that low-power state.
 */
#define PWRMODE_NORMAL		(0<<0)
#define PWRMODE_IDLE		(1<<0)
#define PWRMODE_STANDBY		(2<<0)
#define PWRMODE_SLEEP		(3<<0)
#define PWRMODE_DEEP_SLEEP	(7<<0)
42
/* XXX */
/*
 * Pre-computed SDRAM refresh (MDREFR) and static memory controller
 * (MSC0..MSC2) values for the "high" (fast bus) and "low" (slow bus)
 * configurations.  These are board-specific timing constants
 * (XXX C3000, per the comment at pxa2x0_cpu_resume); do not change
 * them without the PXA27x manual and the board schematics at hand.
 *
 * Each MSCx word holds two chip-select configurations: the upper
 * halfword (<< 16) and the lower halfword program the two banks of
 * the register pair.
 */
#define MDREFR_C3000	(MDREFR_K0DB2|MDREFR_E1PIN|MDREFR_K1RUN|\
    MDREFR_K1DB2|MDREFR_K2DB2|MDREFR_APD)
#define MDREFR_DRI_91MHZ	(0x13<<0)	/* refresh interval for 91 MHz mode */
#define MDREFR_HIGH	(MDREFR_C3000 | 0x030)
#define MDREFR_LOW	(MDREFR_C3000 | 0x00b)
#define MDREFR_SPEED_91	(MDREFR_C3000 | MDREFR_DRI_91MHZ)
#define MDREFR_SPEED_LOW (MDREFR_C3000 | 0x017)
#define MSC0_HIGH \
    ( 7 << MSC_RRR_SHIFT << 16) | \
    (15 << MSC_RDN_SHIFT << 16) | \
    (15 << MSC_RDF_SHIFT << 16) | \
    (MSC_RT_NONBURST << 16) | \
    ( 2 << MSC_RRR_SHIFT) | \
    (13 << MSC_RDN_SHIFT) | \
    (13 << MSC_RDF_SHIFT) | \
    MSC_RBW /* PXA271 */ | \
    MSC_RT_NONBURST
#define MSC1_HIGH \
    ( 7 << MSC_RRR_SHIFT << 16) | \
    (15 << MSC_RDN_SHIFT << 16) | \
    (15 << MSC_RDF_SHIFT << 16) | \
    (MSC_RT_VLIO << 16) | \
    ( 3 << MSC_RRR_SHIFT) | \
    ( 4 << MSC_RDN_SHIFT) | \
    (13 << MSC_RDF_SHIFT) | \
    MSC_RT_VLIO
#define MSC2_HIGH \
    ( 7 << MSC_RRR_SHIFT << 16) | \
    (15 << MSC_RDN_SHIFT << 16) | \
    (15 << MSC_RDF_SHIFT << 16) | \
    (MSC_RT_NONBURST << 16) | \
    ( 3 << MSC_RRR_SHIFT) | \
    ( 4 << MSC_RDN_SHIFT) | \
    (13 << MSC_RDF_SHIFT) | \
    MSC_RT_VLIO
#define MSC0_LOW \
    ( 7 << MSC_RRR_SHIFT << 16) | \
    (15 << MSC_RDN_SHIFT << 16) | \
    (15 << MSC_RDF_SHIFT << 16) | \
    (MSC_RT_NONBURST << 16) | \
    ( 1 << MSC_RRR_SHIFT) | \
    ( 8 << MSC_RDN_SHIFT) | \
    ( 8 << MSC_RDF_SHIFT) | \
    MSC_RBW /* PXA271 */ | \
    MSC_RT_NONBURST
#define MSC1_LOW \
    ( 7 << MSC_RRR_SHIFT << 16) | \
    (15 << MSC_RDN_SHIFT << 16) | \
    (15 << MSC_RDF_SHIFT << 16) | \
    (MSC_RT_VLIO << 16) | \
    ( 1 << MSC_RRR_SHIFT) | \
    ( 2 << MSC_RDN_SHIFT) | \
    ( 6 << MSC_RDF_SHIFT) | \
    MSC_RT_VLIO
#define MSC2_LOW \
    ( 7 << MSC_RRR_SHIFT << 16) | \
    (15 << MSC_RDN_SHIFT << 16) | \
    (15 << MSC_RDF_SHIFT << 16) | \
    (MSC_RT_NONBURST << 16) | \
    ( 1 << MSC_RRR_SHIFT) | \
    ( 2 << MSC_RDN_SHIFT) | \
    ( 6 << MSC_RDF_SHIFT) | \
    MSC_RT_VLIO
107
	.text
	.global	_C_LABEL(vector_page)
	.global	_C_LABEL(xscale_cache_clean_addr)
	.global	_C_LABEL(pxa2x0_clkman_ioh)
	.global	_C_LABEL(pxa2x0_memctl_ioh)

/* Literal pool: virtual addresses of C-side variables used below. */
.Lvector_page:
	.word	_C_LABEL(vector_page)
.Lxscale_cache_clean_addr:
	.word	_C_LABEL(xscale_cache_clean_addr)

/* bus_space handles set up by C code (pointers to the ioh variables). */
.Lgpioiohp:	.word	_C_LABEL(pxa2x0_gpio_ioh)
.Lclkmaniohp:	.word	_C_LABEL(pxa2x0_clkman_ioh)
.Lmemctliohp:	.word	_C_LABEL(pxa2x0_memctl_ioh)

.Lsleepdata:	.word	sleepdata	/* VA of the saved-state area */
/* XXX hard-coded KVA->PA translation of the kernel image (0xc0200000
 * VA mapped at 0xa0200000 PA); needed because pxa2x0_cpu_resume runs
 * with the MMU off. */
.Lsleepdata_phys: .word	sleepdata - 0xc0200000 + 0xa0200000 /* XXX */
.Lsleepdata_svc: .word	sleepdata_svc	/* VA of the SVC-mode sub-area */

/* Pre-computed CCCR/MDREFR/MSC words for the fast and slow modes.
 * NOTE(review): the *_high words and .Lcccr_high are not referenced
 * in this file — presumably consumed by code elsewhere or kept for
 * symmetry; confirm before removing. */
.Lcccr_high:	.word	CCCR_A | CCCR_TURBO_X2 | CCCR_RUN_X16
.Lmdrefr_high:	.word	MDREFR_HIGH
.Lmsc0_high:	.word	MSC0_HIGH
.Lmsc1_high:	.word	MSC1_HIGH
.Lmsc2_high:	.word	MSC2_HIGH
.Lmdrefr_low:	.word	MDREFR_LOW
.Lmsc0_low:	.word	MSC0_LOW
.Lmsc1_low:	.word	MSC1_LOW
.Lmsc2_low:	.word	MSC2_LOW
136
/*
 * void pxa2x0_cpu_suspend(void)
 *
 * Enter sleep mode without automatic voltage change.  The core must
 * be in low power mode, and interrupts disabled.
 *
 * Saves the cp15 MMU state and every processor mode's banked
 * registers into the sleepdata area, cleans the D-cache, puts SDRAM
 * into self-refresh and finally enters sleep via cp14 PWRMODE.
 * Wake-up resumes at pxa2x0_cpu_resume (physical address placed in
 * the PSPR by C code), which eventually jumps back to
 * pxa2x0_cpu_resume_virt with the MMU re-enabled.
 */
ENTRY(pxa2x0_cpu_suspend)
	stmdb	sp!, {r0-r12, lr}	/* SVC regs; popped into pc on resume */

	ldr	r3, .Lsleepdata		/* Point to the data area. */
	ldr	r2, =pxa2x0_cpu_resume_virt
	str	r2, [r3], #4		/* Save VA to re-enter after resume. */

	mrc	p15, 0, r2, c1, c0, 0	/* Load MMU control register. */
	mov	r0, #0xff000000
	orr	r0, r0, #0x00ff0000
	bic	r2, r2, r0		/* Clear undefined bits. */
	str	r2, [r3], #4		/* Save MMU control register. */

	mrc	p15, 0, r2, c2, c0, 0	/* Load TTB address. */
	mov	r0, #0x00003f00
	orr	r0, r0, #0x000000ff
	bic	r2, r2, r0		/* Clear undefined bits. */
	str	r2, [r3], #4		/* Save TTB address. */

	mrc	p15, 0, r2, c3, c0, 0	/* Load domain access control. */
	str	r2, [r3], #4		/* Save domain access control. */

	mrs	r2, spsr		/* Load SVC saved CPSR. */
	str	r2, [r3], #4		/* Save SVC saved CPSR. */
	str	sp, [r3], #4		/* Save SVC stack pointer. */

	/*
	 * Visit each remaining processor mode (IRQ/FIQ kept masked)
	 * to save its banked SPSR, sp and lr — plus r8-r12 for FIQ.
	 * The store order here must match the layout of sleepdata and
	 * the restore order in pxa2x0_cpu_resume_virt.
	 */
	mov	r1, #(PSR_FIQ32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter FIQ mode. */
	mrs	r2, spsr		/* Load FIQ mode saved CPSR. */
	stmia	r3!, {r2, r8-r12, sp, lr} /* Save FIQ mode registers. */

	mov	r1, #(PSR_IRQ32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter IRQ mode. */
	mrs	r0, spsr		/* Load IRQ mode saved CPSR. */
	stmia	r3!, {r0, sp, lr}	/* Save IRQ mode registers. */

	mov	r1, #(PSR_ABT32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter ABT mode. */
	mrs	r0, spsr		/* Load ABT mode saved CPSR. */
	stmia	r3!, {r0, sp, lr}	/* Save ABT mode registers. */

	mov	r1, #(PSR_UND32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter UND mode. */
	mrs	r0, spsr		/* Load UND mode saved CPSR. */
	stmia	r3!, {r0, sp, lr}	/* Save UND mode registers. */

	mov	r1, #(PSR_SYS32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Enter SYS mode. */
	stmia	r3!, {sp, lr}		/* Save SYS mode registers. */

	mov	r1, #(PSR_SVC32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1		/* Return to SVC mode. */

	/* At this point all critical registers have been saved. */

	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */

	/*
	 * Clean the entire D-cache by line-allocating from the
	 * dedicated flush area, so dirty data reaches SDRAM before
	 * it is put into self-refresh.
	 */
	mov	r1, #DCACHE_CACHELINECOUNT
	ldr	r2, .Lxscale_cache_clean_addr
	ldr	r0, [r2]
	/*
	 * For an explanation of the following two instructions, refer
	 * to the ``BUG ALERT'' section of the XSCALE_CACHE_CLEAN_PROLOGUE
	 * macro in arch/arm/arm/cpufunc_asm_xscale.S.
	 */
	eor	r0, r0, #(DCACHE_SIZE)
	str	r0, [r2]

cache_flush_loop:
	mrs	r2, cpsr
	orr	r2, r2, #(I32_bit|F32_bit)
	msr	cpsr_c, r2		/* disable IRQ/FIQ */

	mcr	p15, 0, r0, c7, c2, 5	/* allocate cache line */
	mcr	p15, 0, r0, c7, c6, 1	/* flush D cache single entry */

	mrs	r2, cpsr
	and	r2, r2, #~(I32_bit|F32_bit)
	msr	cpsr_c, r2		/* enable IRQ/FIQ */

	add	r0, r0, #CACHELINESIZE
	subs	r1, r1, #1		/* r1 = lines remaining */
	bne	cache_flush_loop

	mov	r0, #0
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */

	/* NOTE(review): branch + nop padding, presumably to flush the
	 * prefetch pipeline before the sleep entry sequence — confirm
	 * against the PXA27x sleep errata before removing. */
	b	1f
1:
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* Prepare to enter sleep mode. */
	mov	r1, #PWRMODE_SLEEP

	/* Prepare to put SDRAM into self-refresh mode. */
	ldr	r4, .Lmemctliohp
	ldr	r4, [r4]
	add	r4, r4, #MEMCTL_MDREFR
	ldr	r5, [r4]
	orr	r5, r5, #MDREFR_SLFRSH

	/* XXX prepare pointer to physical address 0, but for whom? */
	ldr	r2, .Lvector_page

	/*
	 * Execute the rest of this routine from cache.  The needed values
	 * are now in registers; no SDRAM access may happen after the
	 * MDREFR store below until wake-up.
	 */
	b	1f
	/* XXX tell as(1) to dump the literal pool here, but why? */
	.ltorg
	.align	5
1:

	/* Put SDRAM into self-refresh mode manually. */
	str	r5, [r4]
	nop

	/*
	 * Enter sleep mode.  Exit from sleep mode returns the processor
	 * to normal run mode.  Execution resumes at the physical address
	 * stored in the PSPR after the required boot sequence (a short
	 * excursion into the ROM boot loader).
	 */
	mcr	p14, 0, r1, c7, c0, 0

	/* Just in case that wake-up does not resume at the PSPR
	 * address: spin here instead of executing random memory. */
	nop
	nop
	nop
1:
	b	1b
286
/*
 * void pxa2x0_cpu_resume(void)
 *
 * Wake-up entry point, reached from the ROM boot loader via the PSPR.
 * Runs at its physical address with the MMU and caches off; restores
 * the cp15 state saved by pxa2x0_cpu_suspend and jumps to
 * pxa2x0_cpu_resume_virt once translation is back on.
 */
	.align	5
ENTRY(pxa2x0_cpu_resume)
	/* XXX C3000-specific */
	/* Reprogram the SDRAM refresh interval for 91 MHz operation
	 * before touching SDRAM.  MDREFR is accessed by physical
	 * address since the MMU is still off. */
	ldr	r0, .Lmdrefr_addr_phys
	b	1f
	.align	5		/* run each step from its own cache line */
1:
	ldr	r2, [r0]
	bic	r2, r2, #MDREFR_DRI & 0x000000ff
	bic	r2, r2, #MDREFR_DRI & 0x0000ff00
	orr	r2, r2, #MDREFR_DRI_91MHZ
	str	r2, [r0]
	b	1f
	.align	5
1:
	ldr	r0, .Lsleepdata_phys	/* Point to PA of saved data. */

	/* r7 = pxa2x0_cpu_resume_virt, r8 = MMU control,
	 * r9 = TTB, r10 = domain access control (see sleepdata). */
	ldmia	r0!, {r7-r10}
	mcr	p15, 0, r10, c3, c0, 0	/* Restore domain access control. */
	mcr	p15, 0, r9, c2, c0, 0	/* Restore TTB address. */
	mcr	p15, 0, r0, c8, c7, 0	/* Flush I+D TLBs. */
	mcr	p15, 0, r0, c7, c7, 0	/* Flush I+D BTB. */
	mcr	p15, 0, r8, c1, c0, 0	/* Restore MMU control. */
	mov	pc, r7			/* Jump to virtual address. */
	/* nop slide: instructions prefetched before translation was
	 * enabled must be harmless. */
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
322
/*
 * Continuation of pxa2x0_cpu_resume, entered at the virtual address
 * with the MMU back on.  Restores every mode's banked registers in
 * the exact order pxa2x0_cpu_suspend saved them (starting at
 * sleepdata_svc — the first four sleepdata words were consumed by
 * pxa2x0_cpu_resume), then pops the SVC frame and returns to
 * pxa2x0_cpu_suspend's caller.
 */
pxa2x0_cpu_resume_virt:
	ldr	r2, .Lsleepdata_svc	/* Load VA of saved registers. */

	/* Restore SVC mode SPSR and stack pointer. */
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	sp, [r2], #4

	/* Restore FIQ mode registers. */
	mov	r1, #(PSR_FIQ32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	r8, [r2], #4
	ldr	r9, [r2], #4
	ldr	r10, [r2], #4
	ldr	r11, [r2], #4
	ldr	r12, [r2], #4
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Restore IRQ mode registers. */
	mov	r1, #(PSR_IRQ32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Restore ABT mode registers. */
	mov	r1, #(PSR_ABT32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Restore UND mode registers. */
	mov	r1, #(PSR_UND32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	r0, [r2], #4
	msr	spsr, r0
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Restore SYS mode registers. */
	mov	r1, #(PSR_SYS32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1
	ldr	sp, [r2], #4
	ldr	lr, [r2], #4

	/* Return to SVC mode. */
	mov	r1, #(PSR_SVC32_MODE | I32_bit | F32_bit)
	msr	cpsr, r1

	/* Pop the frame pushed at pxa2x0_cpu_suspend entry and return. */
	ldmia	sp!, {r0-r12, pc}

/* Physical address of MDREFR, for use before the MMU is enabled. */
.Lmdrefr_addr_phys:
	.word	PXA2X0_MEMCTL_BASE + MEMCTL_MDREFR
382
	.data

/*
 * Saved processor state, written by pxa2x0_cpu_suspend and consumed
 * by pxa2x0_cpu_resume (first four words, via the physical alias
 * .Lsleepdata_phys) and pxa2x0_cpu_resume_virt (from sleepdata_svc
 * on).  The word order here must match the save/restore sequences
 * in those routines exactly.
 */
	.align	5
sleepdata:
	.word	0		/* =pxa2x0_cpu_resume_virt */
	.word	0		/* MMU control */
	.word	0		/* MMU TTB address */
	.word	0		/* MMU domain access control */
sleepdata_svc:
	.word	0		/* SVC mode saved CPSR */
	.word	0		/* SVC mode stack pointer */
	.word	0		/* FIQ mode saved CPSR */
	.word	0		/* FIQ mode r8 */
	.word	0		/* FIQ mode r9 */
	.word	0		/* FIQ mode r10 */
	.word	0		/* FIQ mode r11 */
	.word	0		/* FIQ mode r12 */
	.word	0		/* FIQ mode stack pointer */
	.word	0		/* FIQ mode link register */
	.word	0		/* IRQ mode saved CPSR */
	.word	0		/* IRQ mode stack pointer */
	.word	0		/* IRQ mode link register */
	.word	0		/* ABT mode saved CPSR */
	.word	0		/* ABT mode stack pointer */
	.word	0		/* ABT mode link register */
	.word	0		/* UND mode saved CPSR */
	.word	0		/* UND mode stack pointer */
	.word	0		/* UND mode link register */
	.word	0		/* SYS mode stack pointer */
	.word	0		/* SYS mode link register */

	.text
418
/*
 * void pxa27x_run_mode(void)
 *
 * Disable half-turbo and turbo mode, but keep fast-bus mode.
 * Memory and LCD clock is not changed, so no reconfiguration is
 * necessary.
 */
ENTRY(pxa27x_run_mode)
	str	r0, [sp, #-4]!		/* preserve caller's r0 */
	mrc	p14, 0, r0, c6, c0, 0	/* read CLKCFG */
	bic	r0, r0, #(CLKCFG_HT | CLKCFG_F | CLKCFG_T)
	mcr	p14, 0, r0, c6, c0, 0	/* write CLKCFG back */
	ldr	r0, [sp], #4
	mov	pc, lr
433
/*
 * void pxa27x_fastbus_run_mode(int enable, uint32_t mdrefr)
 *
 * Enter normal run mode with fast-bus mode enabled or disabled.
 * The new value of MDREFR is programmed before or after CLKCFG,
 * as appropriate: after when speeding the bus up, before when
 * slowing it down, so the refresh rate is always valid for the
 * current bus frequency.
 *
 * In:	r0 = enable flag, r1 = new MDREFR value.
 * Each branch is cache-line aligned (.align 5) so the MDREFR/CLKCFG
 * sequence executes from cache without SDRAM fetches.
 */
	.align	5
ENTRY(pxa27x_fastbus_run_mode)
	stmdb	sp!, {r0-r2, lr}
	ldr	r2, .Lmemctliohp	/* r2 = memory controller ioh */
	ldr	r2, [r2]
	cmp	r0, #0
	beq	disable_fastbus
	b	enable_fastbus
	.align	5
enable_fastbus:
	/* Enter normal run mode with fast-bus mode enabled. */
	mov	r0, #CLKCFG_B
	mcr	p14, 0, r0, c6, c0, 0
	/* Set the new SDRAM refresh rate. */
	str	r1, [r2, #MEMCTL_MDREFR]
	/* Read back and stall one cycle so the store completes before
	 * the next SDRAM access (cf. the I/O-ordering comment in
	 * frequency_change_on_cache). */
	ldr	r0, [r2, #MEMCTL_MDREFR]
	mov	r0, r0
	ldmia	sp!, {r0-r2, pc}
	.align	5
disable_fastbus:
	/* Set the new SDRAM refresh rate. */
	str	r1, [r2, #MEMCTL_MDREFR]
	ldr	r0, [r2, #MEMCTL_MDREFR]
	mov	r0, r0			/* readback barrier, as above */
	/* Enter normal run mode with fast-bus mode disabled. */
	mov	r0, #0x0
	mcr	p14, 0, r0, c6, c0, 0
	ldmia	sp!, {r0-r2, pc}
469
/* Keep these offsets in sync with struct memcfg. */
/* Byte offsets into the struct pxa2x0_memcfg passed in r2 to
 * pxa27x_frequency_change; msc_high/msc_low each start a run of
 * three words (MSC0..MSC2) read sequentially by
 * frequency_change_on_cache. */
#define	memcfg_mdrefr_high	0x00
#define	memcfg_mdrefr_low	0x04
#define	memcfg_mdrefr_low2	0x08	/* unused */
#define	memcfg_msc_high		0x0c
#define	memcfg_msc_low		0x18
#define	memcfg_mdrefr_91	0x24
477
/*
 * void pxa27x_frequency_change(int cccr, int clkcfg,
 *     struct pxa2x0_memcfg *memcfg)
 *
 * Change the core PLL frequency and SDRAM refresh rate, ensuring the
 * proper sequence of operations.  If the CCCR_A bit is clear and L
 * is not equal to 7 the result is undefined.
 *
 * In:	r0 = CCCR value, r1 = CLKCFG value, r2 = memcfg pointer.
 * One of three paths (high / 208 MHz / 91 MHz) loads the low and
 * target MDREFR values and the MSC table offset, then tail-calls
 * frequency_change_on_cache.
 */
	.align	5
ENTRY(pxa27x_frequency_change)
	stmdb	sp!, {r0-r5, lr}

	/* Always write to CCCR before a frequency change. */
	ldr	r3, .Lclkmaniohp
	ldr	r3, [r3]
	str	r0, [r3, #CLKMAN_CCCR]

	/* Load the needed values into registers to avoid SDRAM access. */
	and	r3, r0, #CCCR_L_MASK
	ldr	r0, .Lmemctliohp
	ldr	r0, [r0]
	cmp	r3, #CCCR_RUN_X7	/* L=7 is 91MHz mode */
	beq	frequency_change_91
	and	r3, r1, #CLKCFG_B
	cmp	r3, #CLKCFG_B
	bne	frequency_change_208
	/* FALLTHROUGH */
frequency_change_high:
	ldr	r3, [r2, #memcfg_mdrefr_low]
	ldr	r4, [r2, #memcfg_mdrefr_high]
	add	r2, r2, #memcfg_msc_high
	/*
	 * XXX why BL?  frequency_change_on_cache ends with
	 * "ldmia sp!, {r0-r5, pc}", which pops the frame pushed above
	 * and returns straight to our caller — so these bl's never
	 * come back here, and the paths below are alternatives, not a
	 * fall-through sequence.
	 */
	bl	frequency_change_on_cache
frequency_change_208:
	/* NOTE(review): r3 and r4 both load mdrefr_low here — the
	 * 208 MHz (non-fast-bus) case apparently keeps the low refresh
	 * value as the target too; confirm against struct pxa2x0_memcfg
	 * users before "fixing". */
	ldr	r3, [r2, #memcfg_mdrefr_low]
	ldr	r4, [r2, #memcfg_mdrefr_low]
	add	r2, r2, #memcfg_msc_high
	bl	frequency_change_on_cache
frequency_change_91:
	ldr	r3, [r2, #memcfg_mdrefr_low]
	ldr	r4, [r2, #memcfg_mdrefr_91]
	add	r2, r2, #memcfg_msc_low
	bl	frequency_change_on_cache

	/* Align execution to a cache line. */
	.align	5
/*
 * Common tail, executed entirely from one cache line group so no
 * SDRAM fetches happen mid-sequence.
 * In:	r0 = memctl ioh, r1 = CLKCFG, r2 = &MSC values,
 *	r3 = low MDREFR, r4 = target MDREFR.
 * Pops the pxa27x_frequency_change frame and returns to its caller.
 */
frequency_change_on_cache:
	/* Change to a low SDRAM refresh rate.  Wait until the store to
	 * MDREFR is complete, following section 2.4 I/O Ordering and
	 * 6.5.1.4 of the PXA27x Developer's Manual. */
	str	r3, [r0, #MEMCTL_MDREFR]
	ldr	r5, [r0, #MEMCTL_MDREFR]
	mov	r5, r5
	/* Program new CLKCFG value, starting a core PLL frequency change
	 * if CLKCFG_F is set. */
	mcr	p14, 0, r1, c6, c0, 0
	/* Change SDRAM clock frequency to 104MHz, and ensure that the
	 * store to MDREFR is complete before the next SDRAM access. */
	str	r4, [r0, #MEMCTL_MDREFR]
	ldr	r5, [r0, #MEMCTL_MDREFR]
	mov	r5, r5
	/* Configure synchronous, static, and VLIO interfaces. */
	ldr	r1, [r2], #4
	str	r1, [r0, #MEMCTL_MSC0]
	ldr	r1, [r2], #4
	str	r1, [r0, #MEMCTL_MSC1]
	ldr	r1, [r2]
	str	r1, [r0, #MEMCTL_MSC2]
	ldmia	sp!, {r0-r5, pc}
546
/*
 * void pxa27x_cpu_speed_91(void)
 *
 * Switch core run frequency to 91 MHz.
 *
 * Mirrors the 91 MHz path of pxa27x_frequency_change, but with all
 * values taken from local literals: program CCCR, drop MDREFR to the
 * low rate, trigger the frequency change via CLKCFG_F, set the
 * 91 MHz refresh rate, then program the slow MSC timings.
 */
	.align	5
ENTRY(pxa27x_cpu_speed_91)
	stmdb	sp!, {r0-r3, lr}

	ldr	r0, .Lclkmaniohp
	ldr	r0, [r0]
	ldr	r1, .Lcccr_91
	str	r1, [r0, #CLKMAN_CCCR]	/* CCCR first, as always */

	ldr	r0, .Lmemctliohp
	ldr	r0, [r0]
	ldr	r2, .Lmdrefr_91
	ldr	r3, .Lmdrefr_low

	/* Run the MDREFR/CLKCFG sequence from a fresh cache line. */
	bl	1f
	.align	5
1:
	str	r3, [r0, #MEMCTL_MDREFR]	/* low refresh rate first */
	ldr	r3, [r0, #MEMCTL_MDREFR]	/* readback: wait for store */

	mov	r1, #CLKCFG_F
	mcr	p14, 0, r1, c6, c0, 0	/* start the frequency change */
	str	r2, [r0, #MEMCTL_MDREFR]	/* 91 MHz refresh rate */
	ldr	r2, [r0, #MEMCTL_MDREFR]

	/* Slow static/VLIO timings to match the reduced bus clock. */
	ldr	r1, .Lmsc0_low
	str	r1, [r0, #MEMCTL_MSC0]
	ldr	r1, .Lmsc1_low
	str	r1, [r0, #MEMCTL_MSC1]
	ldr	r1, .Lmsc2_low
	str	r1, [r0, #MEMCTL_MSC2]

	ldmia	sp!, {r0-r3, pc}

/* CCCR and MDREFR literals for 91 MHz (L=7, turbo x1) operation. */
.Lcccr_91:	.word	CCCR_TURBO_X1 | CCCR_RUN_X7
.Lmdrefr_91:	.word	MDREFR_SPEED_91
588