2 * linux/arch/arm/mach-omap2/sleep.S
6 * Karthik Dasu <karthik-dp@ti.com>
9 * Texas Instruments, <www.ti.com>
10 * Richard Woodruff <r-woodruff2@ti.com>
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License as
14 * published by the Free Software Foundation; either version 2 of
15 * the License, or (at your option) any later version.
17 * This program is distributed in the hope that it will be useful,
18 * but WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 * GNU General Public License for more details.
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, write to the Free Software
24 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
27 #include <linux/linkage.h>
28 #include <asm/assembler.h>
29 #include <asm/arch/io.h>
30 #include <asm/arch/pm.h>
/* Register address constants. Suffix _V = virtual address (mapped via
 * IO_ADDRESS), _P = raw physical address. The PRM offsets appear to be
 * MPU/CORE power-state registers (names: PREPWSTST/PWSTCTRL) — confirm
 * against the OMAP34xx TRM. SDRC_POWER controls SDRAM self-refresh
 * behaviour (see its use at the top of omap34xx_suspend). */
32 #define PM_PREPWSTST_CORE_V IO_ADDRESS(PRM_BASE + 0xAE8)
33 #define PM_PREPWSTST_MPU_V IO_ADDRESS(PRM_BASE + 0x9E8)
34 #define PM_PWSTCTRL_MPU_P (PRM_BASE + 0x9E0)
35 #define SCRATCHPAD_BASE_P 0x48002910 /* physical scratchpad address */
36 #define SDRC_POWER_V IO_ADDRESS(SDRC_BASE + 0x070)
39 /* Function call to get the restore pointer for resume from OFF */
/* get_restore_pointer: returns the address at which execution should
 * resume after an off-mode wakeup (per the comment above).
 * NOTE(review): the instruction that loads the return value into r0
 * (original line 42) is missing from this copy — confirm against the
 * original sleep34xx.S before relying on this stub. */
40 ENTRY(get_restore_pointer)
41 stmfd sp!, {lr} @ save registers on stack
43 ldmfd sp!, {pc} @ restore regs and return
/* get_restore_pointer_sz: exported constant holding the size, in
 * bytes, of the get_restore_pointer stub above — mirroring the
 * omap34xx_suspend_sz idiom at the end of this file.
 * Fix: the size must be measured from the START of the stub
 * (get_restore_pointer); the original expression subtracted this
 * label itself, which always evaluates to zero. */
44 ENTRY(get_restore_pointer_sz)
45 .word . - get_restore_pointer
/*
 * omap34xx_suspend: execute WFI (optionally saving CPU context first)
 * to drop the OMAP3 into a low-power state.
 * In:  r0 = restore pointer in SDRAM, r1 = context-save flag
 *      (per the inline comments below).
 * NOTE(review): this copy of the file carries stray leading line
 * numbers and is missing many lines — it will not assemble as-is;
 * verify every sequence against the original file.
 */
47 * Forces OMAP into idle state
49 * omap34xx_suspend() - This bit of code just executes the WFI
52 * Note: This code gets copied to internal SRAM at boot. When the OMAP
53 * wakes up it continues execution at the point it went to sleep.
55 ENTRY(omap34xx_suspend)
56 stmfd sp!, {r0-r12, lr} @ save registers on stack
58 /*b loop*/ @Enable to debug by stepping through code
59 /* r0 contains restore pointer in sdram */
60 /* r1 contains information about saving context */
@ Let the SDRC put SDRAM into self-refresh on an idle request so its
@ contents are preserved while the CPU sits in WFI.
61 ldr r4, sdrc_power @ read the SDRC_POWER register
62 ldr r5, [r4] @ read the contents of SDRC_POWER
63 orr r5, r5, #0x40 @ enable self refresh on idle req
64 str r5, [r4] @ write back to SDRC_POWER register
67 /* If context save is required, do that and execute wfi */
69 /* Data memory barrier and Data sync barrier */
71 mcr p15, 0, r1, c7, c10, 4 @ CP15 DSB: drain writes before sleeping
72 mcr p15, 0, r1, c7, c10, 5 @ CP15 DMB
74 wfi @ wait for interrupt
@ A wakeup that kept CPU context simply continues execution here.
88 ldmfd sp!, {r0-r12, pc} @ restore regs and return
@ ---- Restore path: runs after an MPU off/retention wakeup ----
@ NOTE(review): the ldmia instructions that load r4-r9 from the
@ scratchpad between the MCR batches below are missing from this copy.
90 /* b restore*/ @ Enable to debug restore code
91 /* Check what was the reason for mpu reset and store the reason in r9*/
92 /* 1 - Only L1 and logic lost */
93 /* 2 - Only L2 lost - In this case, we won't be here */
94 /* 3 - Both L1 and L2 lost */
95 ldr r1, pm_pwstctrl_mpu @ r1 = physical address of PM_PWSTCTRL_MPU
98 cmp r2, #0x0 @ Check if target power state was OFF or RET
99 moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
100 movne r9, #0x1 @ Only L1 and logic lost => avoid L2 invalidation
102 /* Execute smi to invalidate L2 cache */
103 mov r12, #0x1 @ set up to invalidate L2
104 smi: .word 0xE1600070 @ Call SMI monitor (smieq)
107 /* Invalidate all instruction caches to PoU
108 * and flush branch target cache */
109 mcr p15, 0, r1, c7, c5, 0 @ ICIALLU
111 ldr r4, scratchpad_base @ saved-context area in scratchpad RAM
@ Restore the CP15 state saved by the context-save path below.
119 /* Coprocessor access Control Register */
120 mcr p15, 0, r4, c1, c0, 2
@ Translation table base register 0 (TTBR0)
123 MCR p15, 0, r5, c2, c0, 0
@ Translation table base register 1 (TTBR1)
125 MCR p15, 0, r6, c2, c0, 1
126 /* Translation table base control register */
127 MCR p15, 0, r7, c2, c0, 2
128 /*domain access Control Register */
129 MCR p15, 0, r8, c3, c0, 0
130 /* data fault status Register */
131 MCR p15, 0, r9, c5, c0, 0
134 /* instruction fault status Register */
135 MCR p15, 0, r4, c5, c0, 1
136 /*Data Auxiliary Fault Status Register */
137 MCR p15, 0, r5, c5, c1, 0
138 /*Instruction Auxiliary Fault Status Register*/
139 MCR p15, 0, r6, c5, c1, 1
140 /*Data Fault Address Register */
141 MCR p15, 0, r7, c6, c0, 0
142 /*Instruction Fault Address Register*/
143 MCR p15, 0, r8, c6, c0, 2
146 /* user r/w thread and process ID */
147 MCR p15, 0, r4, c13, c0, 2
148 /* user ro thread and process ID */
149 MCR p15, 0, r5, c13, c0, 3
150 /*Privileged only thread and process ID */
151 MCR p15, 0, r6, c13, c0, 4
152 /* cache size selection */
153 MCR p15, 2, r7, c0, c0, 0
155 /* Data TLB lockdown registers */
156 MCR p15, 0, r4, c10, c0, 0
157 /* Instruction TLB lockdown registers */
158 MCR p15, 0, r5, c10, c0, 1
159 /* Secure or Nonsecure Vector Base Address */
160 MCR p15, 0, r6, c12, c0, 0
@ FCSE PID (c13,c0,0 — matches the save-path comment below)
162 MCR p15, 0, r7, c13, c0, 0
@ Context ID (c13,c0,1)
164 MCR p15, 0, r8, c13, c0, 1
167 /* primary memory remap register */
168 MCR p15, 0, r4, c10, c2, 0
169 /*normal memory remap register */
170 MCR p15, 0, r5, c10, c2, 1
@ Restore the banked SP/LR/SPSR of every processor mode from the
@ context block in SDRAM; r3 walks the save area (ldmia r3!).
@ NOTE(review): the mode-constant setup and the msr/ldmia lines for
@ the first mode (and r7 = saved original mode) are missing from
@ this copy — verify the full sequence against the original file.
172 /* Restore registers for other modes from SDRAM */
173 /* Save current mode */
181 /* load the SP and LR from SDRAM */
183 mov sp, r4 /*update the SP */
184 mov lr, r5 /*update the LR */
185 msr spsr, r6 /*update the SPSR*/
190 msr cpsr, r0 /*go into IRQ mode*/
191 ldmia r3!,{r4-r6} /*load the SP and LR from SDRAM*/
192 mov sp, r4 /*update the SP */
193 mov lr, r5 /*update the LR */
194 msr spsr, r6 /*update the SPSR */
199 msr cpsr, r0 /* go into ABORT mode */
200 ldmia r3!,{r4-r6} /*load the SP and LR from SDRAM */
201 mov sp, r4 /*update the SP */
202 mov lr, r5 /*update the LR */
203 msr spsr, r6 /*update the SPSR */
208 msr cpsr, r0 /*go into UNDEF mode */
209 ldmia r3!,{r4-r6} /*load the SP and LR from SDRAM */
210 mov sp, r4 /*update the SP*/
211 mov lr, r5 /*update the LR*/
212 msr spsr, r6 /*update the SPSR*/
214 /* SYSTEM (USER) mode */
217 msr cpsr, r0 /*go into USR mode */
218 ldmia r3!,{r4-r6} /*load the SP and LR from SDRAM*/
219 mov sp, r4 /*update the SP */
220 mov lr, r5 /*update the LR */
221 msr spsr, r6 /*update the SPSR */
222 msr cpsr, r7 /*back to original mode*/
@ Finally restore the saved CPSR itself.
225 ldmia r3!,{r4} /*load CPSR from SDRAM*/
226 msr cpsr, r4 /*store cpsr */
@ Re-enable the MMU. NOTE(review): this appears to patch the page
@ table so the 1MB section covering the current (physical) PC stays
@ executable across MMU turn-on, saving the original entry in the
@ scratchpad for later restoration — confirm against the original
@ file; several instructions in this sequence are missing here.
228 /* Enabling MMU here */
229 mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */
230 /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
235 /* More work needs to be done to support N[0:2] value other than 0
236 * So looping here so that the error can be detected
240 mrc p15, 0, r2, c2, c0, 0 @ r2 = TTBR0 (page table base)
244 ldr r5, table_index_mask
245 and r4, r5 /* r4 = 31 to 20 bits of pc */
246 /* Extract the value to be written to table entry */
248 add r1, r1, r4 /* r1 has value to be written to table entry*/
249 /* Getting the address of table entry to modify */
251 add r2, r4 /* r2 has the location which needs to be modified */
252 /* Storing previous entry of location being modified */
253 ldr r5, scratchpad_base
256 /* Modify the table entry */
258 /* Storing address of entry being modified
259 * - will be restored after enabling MMU */
260 ldr r5, scratchpad_base
264 mcr p15, 0, r0, c7, c5, 4 @ Flush prefetch buffer
265 mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
266 mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
267 mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
268 /* Restore control register but don't enable caches here*/
269 /* Caches will be enabled after restoring MMU table entry */
271 /* Store previous value of control register in scratchpad */
273 ldr r2, cache_pred_disable_mask
275 mcr p15, 0, r4, c1, c0, 0 @ write control reg: caches still masked off
277 ldmfd sp!, {r0-r12, pc} @ restore regs and return
@ ---- Context-save path (save_context_wfi) ----
@ Saves banked ARM registers and CP15 state to the SDRAM area passed
@ in r0, then continues to the cache-clean / WFI sequence below.
@ NOTE(review): the stm instructions that store each mrc batch to the
@ save area, plus several branches and mode switches, are missing
@ from this copy — verify against the original file.
279 /*b save_context_wfi*/ @ enable to debug save code
280 mov r8, r0 /* Store SDRAM address in r8 */
281 /* Check what the target sleep state is: stored in r1 */
282 /* 1 - Only L1 and logic lost */
283 /* 2 - Only L2 lost */
284 /* 3 - Both L1 and L2 lost */
285 cmp r1, #0x2 /* Only L2 lost */
287 cmp r1, #0x1 /* L2 retained */
288 /* r9 stores whether to clean L2 or not*/
289 moveq r9, #0x0 /* Don't clean L2 */
290 movne r9, #0x1 /* Clean L2 */
292 /* Store sp and spsr to SDRAM */
297 /* Save all ARM registers */
298 /* Coprocessor access control register */
299 mrc p15, 0, r6, c1, c0, 2
301 /* TTBR0, TTBR1 and Translation table base control */
302 mrc p15, 0, r4, c2, c0, 0
303 mrc p15, 0, r5, c2, c0, 1
304 mrc p15, 0, r6, c2, c0, 2
306 /* Domain access control register, data fault status register,
307 and instruction fault status register */
308 mrc p15, 0, r4, c3, c0, 0
309 mrc p15, 0, r5, c5, c0, 0
310 mrc p15, 0, r6, c5, c0, 1
312 /* Data aux fault status register, instruction aux fault status,
313 data fault address register and instruction fault address register*/
314 mrc p15, 0, r4, c5, c1, 0
315 mrc p15, 0, r5, c5, c1, 1
316 mrc p15, 0, r6, c6, c0, 0
317 mrc p15, 0, r7, c6, c0, 2
319 /* user r/w thread and process ID, user r/o thread and process ID,
320 priv only thread and process ID, cache size selection */
321 mrc p15, 0, r4, c13, c0, 2
322 mrc p15, 0, r5, c13, c0, 3
323 mrc p15, 0, r6, c13, c0, 4
324 mrc p15, 2, r7, c0, c0, 0
326 /* Data TLB lockdown, instruction TLB lockdown registers */
327 mrc p15, 0, r5, c10, c0, 0
328 mrc p15, 0, r6, c10, c0, 1
330 /* Secure or non secure vector base address, FCSE PID, Context PID*/
331 mrc p15, 0, r4, c12, c0, 0
332 mrc p15, 0, r5, c13, c0, 0
333 mrc p15, 0, r6, c13, c0, 1
335 /* Primary remap, normal remap registers */
336 mrc p15, 0, r4, c10, c2, 0
337 mrc p15, 0, r5, c10, c2, 1
339 /* Store SP, LR, SPSR registers for SUP, FIQ, IRQ, ABORT and USER
342 /* move SDRAM address to r7 as r8 is banked in FIQ*/
345 /* Save current mode */
350 msr cpsr, r0 /* go to FIQ mode */
352 mov r4, r13 /* move SP into r4*/
384 /* System (USER mode) */
393 /* Back to original mode */
396 /* Store current cpsr*/
@ ---- Clean the data/unified caches, then enter WFI ----
399 mrc p15, 0, r4, c1, c0, 0 @ read the CP15 control register
400 /* save control register */
403 /* Clean Data or unified cache to POU*/
404 /* How to invalidate only L1 cache???? - #FIX_ME# */
405 /* mcr p15, 0, r11, c7, c11, 1 */
406 cmp r9, #1 /* Check whether L2 inval is required or not*/
@ Standard ARMv7 clean-by-set/way walk over all cache levels, driven
@ by CLIDR/CCSIDR. NOTE(review): the branch/shift/count instructions
@ between the comments below are missing from this copy.
410 mrc p15, 1, r0, c0, c0, 1 @ read CLIDR
411 /* extract loc from clidr */
412 ands r3, r0, #0x7000000
413 /* left align loc bit field */
415 /* if loc is 0, then no need to clean */
417 /* start clean at cache level 0 */
420 /* work out 3x current cache level */
421 add r2, r10, r10, lsr #1
422 /* extract cache type bits from clidr*/
424 /* mask of the bits for current cache only */
426 /* see what cache we have at this level */
428 /* skip if no cache, or just i-cache */
430 /* select current cache level in cssr */
431 mcr p15, 2, r10, c0, c0, 0 @ write cache size selection register
432 /* isb to sync the new cssr&csidr */
434 /* read the new csidr */
435 mrc p15, 1, r1, c0, c0, 0 @ read CCSIDR for the selected level
436 /* extract the length of the cache lines */
438 /* add 4 (line length offset) */
441 /* find maximum number on the way size */
442 ands r4, r4, r1, lsr #3
443 /* find bit position of way size increment */
446 /* extract max number of the index size*/
447 ands r7, r7, r1, lsr #13
450 /* create working copy of max way size*/
452 /* factor way and cache number into r11 */
453 orr r11, r10, r9, lsl r5
454 /* factor index number into r11 */
455 orr r11, r11, r7, lsl r2
456 /* clean D-cache line by set/way (DCCSW, c7/c10/2 — clean only) */
457 mcr p15, 0, r11, c7, c10, 2
458 /* decrement the way*/
461 /*decrement the index */
466 /* increment cache number */
470 /*switch back to cache level 0 */
472 /* select current cache level in cssr */
473 mcr p15, 2, r10, c0, c0, 0
476 /* Data memory barrier and Data sync barrier */
478 mcr p15, 0, r1, c7, c10, 4 @ CP15 DSB
479 mcr p15, 0, r1, c7, c10, 5 @ CP15 DMB
481 wfi @ wait for interrupt
493 /* restore regs and return */
494 ldmfd sp!, {r0-r12, pc}
497 ldr r4, clk_stabilize_delay
@ Literal pool: constants loaded PC-relative by the code above
@ (ldr rX, <label>). NOTE(review): the labels naming these words
@ (e.g. pm_prepwstst_core, pm_pwstctrl_mpu, scratchpad_base) and the
@ .word value following cache_pred_disable_mask are missing from
@ this copy — restore them from the original file.
508 .word PM_PREPWSTST_CORE_V
510 .word PM_PREPWSTST_MPU_V
512 .word PM_PWSTCTRL_MPU_P
514 .word SCRATCHPAD_BASE_P
531 cache_pred_disable_mask:
/* Exported size, in bytes, of the omap34xx_suspend code above —
 * used by C code when copying the suspend routine to internal SRAM
 * (see the note at the top of omap34xx_suspend). */
533 ENTRY(omap34xx_suspend_sz)
534 .word . - omap34xx_suspend