2 * linux/arch/arm/mach-omap2/sleep.S
6 * Karthik Dasu <karthik-dp@ti.com>
9 * Texas Instruments, <www.ti.com>
10 * Richard Woodruff <r-woodruff2@ti.com>
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License as
14 * published by the Free Software Foundation; either version 2 of
15 * the License, or (at your option) any later version.
17 * This program is distributed in the hope that it will be useful,
18 * but WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 * GNU General Public License for more details.
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, write to the Free Software
24 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
27 #include <linux/linkage.h>
28 #include <asm/assembler.h>
/*
 * NOTE(review): this chunk looks like an extraction of the OMAP3 sleep
 * code; each line still carries a stray leading source-line number and
 * many original lines are missing, so it cannot assemble as-is.
 * Confirm against the pristine arch/arm/mach-omap2 source.
 *
 * Address macros below: *_V names are virtual addresses (built from the
 * OMAP34XX_*_REGADDR() helpers), *_P names are physical addresses, for
 * use before/after the MMU is enabled respectively.
 */
36 #define SDRC_SCRATCHPAD_SEM_V 0xfa00291c
38 #define PM_PREPWSTST_CORE_V OMAP34XX_PRM_REGADDR(CORE_MOD, \
39 OMAP3430_PM_PREPWSTST)
40 #define PM_PREPWSTST_CORE_P 0x48306AE8
41 #define PM_PREPWSTST_MPU_V OMAP34XX_PRM_REGADDR(MPU_MOD, \
42 OMAP3430_PM_PREPWSTST)
43 #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
44 #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
45 #define CM_IDLEST_CKGEN_V OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
46 #define SRAM_BASE_P 0x40200000
47 #define CONTROL_STAT 0x480022F0
48 #define CONTROL_MEM_RTA_CTRL (OMAP343X_CTRL_BASE\
49 + OMAP36XX_CONTROL_MEM_RTA_CTRL)
/* NOTE(review): the closing line of the comment below was lost in
 * extraction; terminated here so the preprocessor is not derailed. */
50 #define SCRATCHPAD_MEM_OFFS 0x310 /* Move this as correct place is */
52 #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
53 + SCRATCHPAD_MEM_OFFS)
54 #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER)
55 #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
56 #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0)
57 #define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0)
58 #define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
59 #define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1)
60 #define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1)
61 #define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
62 #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
63 #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
/*
 * lock_scratchpad_sem: spin with swp until the scratchpad semaphore
 * word is acquired.  Clobbers r0-r2; returns via saved lr.
 * NOTE(review): the branch targets wait_loop/wait_sem, the literal
 * label sdrc_scratchpad_sem, and several instructions are missing from
 * this extraction — restore them from the pristine source.
 */
66 /* Function to acquire the semaphore in scratchpad */
67 ENTRY(lock_scratchpad_sem)
68 stmfd sp!, {lr} @ save registers on stack
71 ldr r1, sdrc_scratchpad_sem @ r1 = address of the lock word
73 ldr r2, [r1] @ load the lock value
74 cmp r2, r0 @ is the lock free ?
75 beq wait_loop @ not free...
76 swp r2, r0, [r1] @ semaphore free so lock it and proceed
77 cmp r2, r0 @ did we succeed ?
78 beq wait_sem @ no - try again
79 ldmfd sp!, {pc} @ restore regs and return
/* literal pool: virtual address of the scratchpad semaphore */
81 .word SDRC_SCRATCHPAD_SEM_V
82 ENTRY(lock_scratchpad_sem_sz)
83 .word . - lock_scratchpad_sem
/*
 * unlock_scratchpad_sem: release the scratchpad semaphore.
 * NOTE(review): the store that actually writes the "free" value back
 * through r3 is missing between the ldr and the return in this
 * extraction — confirm against the pristine source.
 */
86 /* Function to release the scratchpad semaphore */
87 ENTRY(unlock_scratchpad_sem)
88 stmfd sp!, {lr} @ save registers on stack
89 ldr r3, sdrc_scratchpad_sem @ r3 = address of the lock word
92 ldmfd sp!, {pc} @ restore regs and return
93 ENTRY(unlock_scratchpad_sem_sz)
94 .word . - unlock_scratchpad_sem
/*
 * get_restore_pointer: return (in r0, presumably via a missing adr)
 * the address of the generic resume-from-OFF entry point.
 * NOTE(review): the instruction loading r0 is absent from this
 * extraction — TODO confirm against the pristine source.
 */
97 /* Function call to get the restore pointer for resume from OFF */
98 ENTRY(get_restore_pointer)
99 stmfd sp!, {lr} @ save registers on stack
101 ldmfd sp!, {pc} @ restore regs and return
102 ENTRY(get_restore_pointer_sz)
103 .word . - get_restore_pointer
/*
 * get_omap3630_restore_pointer: same contract as get_restore_pointer,
 * but for the OMAP3630-specific resume-from-OFF entry point.
 * NOTE(review): the instruction loading r0 is absent from this
 * extraction — TODO confirm against the pristine source.
 */
105 /* Function call to get the restore pointer for 3630 resume from OFF */
106 ENTRY(get_omap3630_restore_pointer)
107 stmfd sp!, {lr} @ save registers on stack
109 ldmfd sp!, {pc} @ restore regs and return
110 ENTRY(get_omap3630_restore_pointer_sz)
111 .word . - get_omap3630_restore_pointer
/* NOTE(review): the open/close of the comment block below and the
 * store that actually sets the flag were lost in extraction. */
115 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
116 * This function sets up a flag that will allow for this toggling to take
117 * place on 3630. Hopefully some version in the future may not need this.
119 ENTRY(enable_omap3630_toggle_l2_on_restore)
120 stmfd sp!, {lr} @ save registers on stack
121 /* Setup so that we will disable and enable l2 */
124 ldmfd sp!, {pc} @ restore regs and return
/*
 * get_es3_restore_pointer: same contract as get_restore_pointer, but
 * for the OMAP3430 ES3.x resume-from-OFF entry point (which includes
 * the es3_sdrc_fix errata workaround).
 * NOTE(review): the instruction loading r0 is absent from this
 * extraction — TODO confirm against the pristine source.
 */
127 /* Function call to get the restore pointer for for ES3 to resume from OFF */
128 ENTRY(get_es3_restore_pointer)
129 stmfd sp!, {lr} @ save registers on stack
131 ldmfd sp!, {pc} @ restore regs and return
132 ENTRY(get_es3_restore_pointer_sz)
133 .word . - get_es3_restore_pointer
/*
 * es3_sdrc_fix (ES3.x SDRC errata workaround): unblock SDRC register
 * access, rewrite the MR/EMR2 mode registers of both chip selects, and
 * issue a manual autorefresh command (value 0x2) to each, so SDRAM is
 * usable again after wakeup from OFF.
 * NOTE(review): the ENTRY(es3_sdrc_fix) label and several literal-pool
 * labels (sdrc_syscfg, sdrc_mr_0, ...) are missing from this
 * extraction — restore them from the pristine source.
 */
136 ldr r4, sdrc_syscfg @ get config addr
137 ldr r5, [r4] @ get value
138 tst r5, #0x100 @ is part access blocked
140 biceq r5, r5, #0x100 @ clear bit if set
141 str r5, [r4] @ write back change
/* CS0: rewrite mode registers with their current values */
142 ldr r4, sdrc_mr_0 @ get config addr
143 ldr r5, [r4] @ get value
144 str r5, [r4] @ write back change
145 ldr r4, sdrc_emr2_0 @ get config addr
146 ldr r5, [r4] @ get value
147 str r5, [r4] @ write back change
148 ldr r4, sdrc_manual_0 @ get config addr
149 mov r5, #0x2 @ autorefresh command
150 str r5, [r4] @ kick off refreshes
/* CS1: same sequence as CS0 above */
151 ldr r4, sdrc_mr_1 @ get config addr
152 ldr r5, [r4] @ get value
153 str r5, [r4] @ write back change
154 ldr r4, sdrc_emr2_1 @ get config addr
155 ldr r5, [r4] @ get value
156 str r5, [r4] @ write back change
157 ldr r4, sdrc_manual_1 @ get config addr
158 mov r5, #0x2 @ autorefresh command
159 str r5, [r4] @ kick off refreshes
/* literal pool (physical addresses — runs with MMU off) */
162 .word SDRC_SYSCONFIG_P
168 .word SDRC_MANUAL_0_P
174 .word SDRC_MANUAL_1_P
175 ENTRY(es3_sdrc_fix_sz)
176 .word . - es3_sdrc_fix
/*
 * save_secure_ram_context(r0 = sdram destination):
 * ask the secure ROM code (via SMI, service ID 25) to dump the secure
 * RAM context to the SDRAM address passed in r0.
 * NOTE(review): the sram_phy_addr_mask literal, the api_params label
 * and several surrounding lines are missing from this extraction.
 */
178 /* Function to call rom code to save secure ram context */
179 ENTRY(save_secure_ram_context)
180 stmfd sp!, {r1-r12, lr} @ save registers on stack
181 save_secure_ram_debug:
182 /* b save_secure_ram_debug */ @ enable to debug save code
183 adr r3, api_params @ r3 points to parameters
184 str r0, [r3,#0x4] @ r0 has sdram address
187 ldr r12, sram_phy_addr_mask
/* Build the secure-service call: ID in r12, task id in r1, flags r2 */
189 mov r0, #25 @ set service ID for PPA
190 mov r12, r0 @ copy secure service ID in r12
191 mov r1, #0 @ set task id for ROM code in r1
192 mov r2, #4 @ set some flags in r2, r6
194 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
195 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
196 .word 0xE1600071 @ call SMI monitor (smi #1)
201 ldmfd sp!, {r1-r12, pc}
/* api_params table: nr of params, sdram addr slot, flags */
207 .word 0x4, 0x0, 0x0, 0x1, 0x1
208 ENTRY(save_secure_ram_context_sz)
209 .word . - save_secure_ram_context
/* NOTE(review): the comment-block delimiters around the header below
 * and the branch to the context-save path were lost in extraction. */
212 * Forces OMAP into idle state
214 * omap34xx_suspend() - This bit of code just executes the WFI
217 * Note: This code gets copied to internal SRAM at boot. When the OMAP
218 * wakes up it continues execution at the point it went to sleep.
220 ENTRY(omap34xx_cpu_suspend)
221 stmfd sp!, {r0-r12, lr} @ save registers on stack
223 /*b loop*/ @Enable to debug by stepping through code
224 /* r0 contains restore pointer in sdram */
225 /* r1 contains information about saving context */
/* Put SDRC into self-refresh-on-idle before idling the MPU */
226 ldr r4, sdrc_power @ read the SDRC_POWER register
227 ldr r5, [r4] @ read the contents of SDRC_POWER
228 orr r5, r5, #0x40 @ enable self refresh on idle req
229 str r5, [r4] @ write back to SDRC_POWER register
232 /* If context save is required, do that and execute wfi */
234 /* Data memory barrier and Data sync barrier */
236 mcr p15, 0, r1, c7, c10, 4
237 mcr p15, 0, r1, c7, c10, 5
239 wfi @ wait for interrupt
253 ldmfd sp!, {r0-r12, pc} @ restore regs and return
/*
 * Wakeup entry points (labels lost in extraction):
 * restore_es3 — ES3.x: if CORE hit OFF, copy es3_sdrc_fix into SRAM
 * (the ldmia/stmia/subs copy loop below) before falling through.
 * restore_3630 — 3630: if CORE hit OFF, disable RTA via
 * CONTROL_MEM_RTA_CTRL before falling through to the common restore.
 */
255 /*b restore_es3*/ @ Enable to debug restore code
256 ldr r5, pm_prepwstst_core_p
259 cmp r4, #0x0 @ Check if previous power state of CORE is OFF
263 ldr r2, es3_sdrc_fix_sz @ r2 = word count to copy
266 ldmia r0!, {r3} @ val = *src
267 stmia r1!, {r3} @ *dst = val
268 subs r2, r2, #0x1 @ num_words--
275 /*b restore_es3630*/ @ Enable to debug restore code
276 ldr r1, pm_prepwstst_core_p
279 cmp r2, #0x0 @ Check if previous power state of CORE is OFF
281 /* Disable RTA before giving control */
282 ldr r1, control_mem_rta
283 mov r2, #OMAP36XX_RTA_DISABLE
285 /* Fall thru for the remaining logic */
/*
 * Common restore path: decide (r9) how much cache/logic context was
 * lost, then on HS/secure parts use SMI calls into the ROM code
 * (service 40 = L2 invalidate via PPA, service 42 = set aux control
 * register) — GP parts use the direct smieq path further below.
 * NOTE(review): branch instructions between these fragments are
 * missing from this extraction.
 */
287 /* b restore*/ @ Enable to debug restore code
288 /* Check what was the reason for mpu reset and store the reason in r9*/
289 /* 1 - Only L1 and logic lost */
290 /* 2 - Only L2 lost - In this case, we wont be here */
291 /* 3 - Both L1 and L2 lost */
292 ldr r1, pm_pwstctrl_mpu
295 cmp r2, #0x0 @ Check if target power state was OFF or RET
296 moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
297 movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation
/* 3630 toggle-L2 workaround: drop the L2 enable bit in aux ctrl */
301 cmp r0, #0x1 @ should we disable L2 on 3630?
303 mrc p15, 0, r0, c1, c0, 1
304 bic r0, r0, #2 @ disable L2 cache
305 mcr p15, 0, r0, c1, c0, 1
/* Secure ROM service 40: invalidate L2 through the PPA */
312 mov r0, #40 @ set service ID for PPA
313 mov r12, r0 @ copy secure Service ID in r12
314 mov r1, #0 @ set task id for ROM code in r1
315 mov r2, #4 @ set some flags in r2, r6
317 adr r3, l2_inv_api_params @ r3 points to dummy parameters
318 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
319 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
320 .word 0xE1600071 @ call SMI monitor (smi #1)
321 /* Write to Aux control register to set some bits */
322 mov r0, #42 @ set service ID for PPA
323 mov r12, r0 @ copy secure Service ID in r12
324 mov r1, #0 @ set task id for ROM code in r1
325 mov r2, #4 @ set some flags in r2, r6
/* parameters live in the scratchpad at offset 0xBC */
327 ldr r4, scratchpad_base
328 ldr r3, [r4, #0xBC] @ r3 points to parameters
329 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
330 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
331 .word 0xE1600071 @ call SMI monitor (smi #1)
/*
 * Optional restore of the L2 aux control register through a secure
 * service, then the GP-device path: direct smieq calls to invalidate
 * L2 and rewrite the aux control registers from the scratchpad, and
 * finally re-enable L2 if it was toggled off for 3630.
 * NOTE(review): the #endif for the #ifdef below, plus several loads
 * and conditional branches, are missing from this extraction.
 */
333 #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
334 /* Restore L2 aux control register */
335 @ set service ID for PPA
336 mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
337 mov r12, r0 @ copy service ID in r12
338 mov r1, #0 @ set task ID for ROM code in r1
339 mov r2, #4 @ set some flags in r2, r6
341 ldr r4, scratchpad_base
343 adds r3, r3, #8 @ r3 points to parameters
344 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
345 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
346 .word 0xE1600071 @ call SMI monitor (smi #1)
/* GP-device path: conditional SMI (smieq) instead of PPA services */
352 /* Execute smi to invalidate L2 cache */
353 mov r12, #0x1 @ set up to invalidate L2
354 smi: .word 0xE1600070 @ Call SMI monitor (smieq)
355 /* Write to Aux control register to set some bits */
356 ldr r4, scratchpad_base
360 .word 0xE1600070 @ Call SMI monitor (smieq)
361 ldr r4, scratchpad_base
365 .word 0xE1600070 @ Call SMI monitor (smieq)
/* Undo the 3630 L2-disable toggle performed before the invalidate */
368 cmp r1, #0x1 @ Do we need to re-enable L2 on 3630?
370 mrc p15, 0, r1, c1, c0, 1
371 orr r1, r1, #2 @ re-enable L2 cache
372 mcr p15, 0, r1, c1, c0, 1
/*
 * Restore the saved CP15 state from the scratchpad/SDRAM image (the
 * MCR writes mirror the MRC reads in save_context_wfi below), patch a
 * 1:1 section mapping for the current PC into the page table so
 * execution survives the MMU turn-on, then enable the MMU with caches
 * still off and return to the suspended caller.
 * NOTE(review): many loads (ldmia of the saved values) between these
 * MCRs are missing from this extraction — the register roles below
 * follow the surviving comments, not visible loads.
 */
375 /* Invalidate all instruction caches to PoU
376 * and flush branch target cache */
377 mcr p15, 0, r1, c7, c5, 0
379 ldr r4, scratchpad_base
388 /* Coprocessor access Control Register */
389 mcr p15, 0, r4, c1, c0, 2
392 MCR p15, 0, r5, c2, c0, 0
394 MCR p15, 0, r6, c2, c0, 1
395 /* Translation table base control register */
396 MCR p15, 0, r7, c2, c0, 2
397 /*domain access Control Register */
398 MCR p15, 0, r8, c3, c0, 0
399 /* data fault status Register */
400 MCR p15, 0, r9, c5, c0, 0
403 /* instruction fault status Register */
404 MCR p15, 0, r4, c5, c0, 1
405 /*Data Auxiliary Fault Status Register */
406 MCR p15, 0, r5, c5, c1, 0
407 /*Instruction Auxiliary Fault Status Register*/
408 MCR p15, 0, r6, c5, c1, 1
409 /*Data Fault Address Register */
410 MCR p15, 0, r7, c6, c0, 0
411 /*Instruction Fault Address Register*/
412 MCR p15, 0, r8, c6, c0, 2
415 /* user r/w thread and process ID */
416 MCR p15, 0, r4, c13, c0, 2
417 /* user ro thread and process ID */
418 MCR p15, 0, r5, c13, c0, 3
419 /*Privileged only thread and process ID */
420 MCR p15, 0, r6, c13, c0, 4
421 /* cache size selection */
422 MCR p15, 2, r7, c0, c0, 0
424 /* Data TLB lockdown registers */
425 MCR p15, 0, r4, c10, c0, 0
426 /* Instruction TLB lockdown registers */
427 MCR p15, 0, r5, c10, c0, 1
428 /* Secure or Nonsecure Vector Base Address */
429 MCR p15, 0, r6, c12, c0, 0
431 MCR p15, 0, r7, c13, c0, 0
433 MCR p15, 0, r8, c13, c0, 1
436 /* primary memory remap register */
437 MCR p15, 0, r4, c10, c2, 0
438 /*normal memory remap register */
439 MCR p15, 0, r5, c10, c2, 1
442 ldmia r3!,{r4} /*load CPSR from SDRAM*/
443 msr cpsr, r4 /*store cpsr */
445 /* Enabling MMU here */
446 mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */
447 /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
452 /* More work needs to be done to support N[0:2] value other than 0
453 * So looping here so that the error can be detected */
457 mrc p15, 0, r2, c2, c0, 0
/* Build a temporary identity mapping for the section holding the PC */
461 ldr r5, table_index_mask
462 and r4, r5 /* r4 = 31 to 20 bits of pc */
463 /* Extract the value to be written to table entry */
465 add r1, r1, r4 /* r1 has value to be written to table entry*/
466 /* Getting the address of table entry to modify */
468 add r2, r4 /* r2 has the location which needs to be modified */
469 /* Storing previous entry of location being modified */
470 ldr r5, scratchpad_base
473 /* Modify the table entry */
475 /* Storing address of entry being modified
476 * - will be restored after enabling MMU */
477 ldr r5, scratchpad_base
481 mcr p15, 0, r0, c7, c5, 4 @ Flush prefetch buffer
482 mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
483 mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
484 mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
485 /* Restore control register but dont enable caches here*/
486 /* Caches will be enabled after restoring MMU table entry */
488 /* Store previous value of control register in scratchpad */
490 ldr r2, cache_pred_disable_mask
492 mcr p15, 0, r4, c1, c0, 0
494 ldmfd sp!, {r0-r12, pc} @ restore regs and return
/*
 * save_context_wfi (label lost in extraction): push the CP15 context
 * to the SDRAM buffer at r8 (the MRC reads mirror the MCR writes in
 * the restore path above), optionally clean L2 (r9), then issue the
 * barriers and WFI.  The stmia stores pairing these MRC reads are
 * missing from this extraction — NOTE(review): restore from pristine
 * source before relying on this sequence.
 */
496 /*b save_context_wfi*/ @ enable to debug save code
497 mov r8, r0 /* Store SDRAM address in r8 */
/* Parameter block for the later secure "set aux ctrl" restore calls */
498 mrc p15, 0, r5, c1, c0, 1 @ Read Auxiliary Control Register
499 mov r4, #0x1 @ Number of parameters for restore call
500 stmia r8!, {r4-r5} @ Push parameters for restore call
501 mrc p15, 1, r5, c9, c0, 2 @ Read L2 AUX ctrl register
502 stmia r8!, {r4-r5} @ Push parameters for restore call
503 /* Check what that target sleep state is:stored in r1*/
504 /* 1 - Only L1 and logic lost */
505 /* 2 - Only L2 lost */
506 /* 3 - Both L1 and L2 lost */
507 cmp r1, #0x2 /* Only L2 lost */
509 cmp r1, #0x1 /* L2 retained */
510 /* r9 stores whether to clean L2 or not*/
511 moveq r9, #0x0 /* Dont Clean L2 */
512 movne r9, #0x1 /* Clean L2 */
514 /* Store sp and spsr to SDRAM */
519 /* Save all ARM registers */
520 /* Coprocessor access control register */
521 mrc p15, 0, r6, c1, c0, 2
523 /* TTBR0, TTBR1 and Translation table base control */
524 mrc p15, 0, r4, c2, c0, 0
525 mrc p15, 0, r5, c2, c0, 1
526 mrc p15, 0, r6, c2, c0, 2
528 /* Domain access control register, data fault status register,
529 and instruction fault status register */
530 mrc p15, 0, r4, c3, c0, 0
531 mrc p15, 0, r5, c5, c0, 0
532 mrc p15, 0, r6, c5, c0, 1
534 /* Data aux fault status register, instruction aux fault status,
535 data fault address register and instruction fault address register*/
536 mrc p15, 0, r4, c5, c1, 0
537 mrc p15, 0, r5, c5, c1, 1
538 mrc p15, 0, r6, c6, c0, 0
539 mrc p15, 0, r7, c6, c0, 2
541 /* user r/w thread and process ID, user r/o thread and process ID,
542 priv only thread and process ID, cache size selection */
543 mrc p15, 0, r4, c13, c0, 2
544 mrc p15, 0, r5, c13, c0, 3
545 mrc p15, 0, r6, c13, c0, 4
546 mrc p15, 2, r7, c0, c0, 0
548 /* Data TLB lockdown, instruction TLB lockdown registers */
549 mrc p15, 0, r5, c10, c0, 0
550 mrc p15, 0, r6, c10, c0, 1
552 /* Secure or non secure vector base address, FCSE PID, Context PID*/
553 mrc p15, 0, r4, c12, c0, 0
554 mrc p15, 0, r5, c13, c0, 0
555 mrc p15, 0, r6, c13, c0, 1
557 /* Primary remap, normal remap registers */
558 mrc p15, 0, r4, c10, c2, 0
559 mrc p15, 0, r5, c10, c2, 1
562 /* Store current cpsr*/
566 mrc p15, 0, r4, c1, c0, 0
567 /* save control register */
570 /* Clean Data or unified cache to POU*/
571 /* How to invalidate only L1 cache???? - #FIX_ME# */
572 /* mcr p15, 0, r11, c7, c11, 1 */
573 cmp r9, #1 /* Check whether L2 inval is required or not*/
577 * Jump out to kernel flush routine
578 * - reuse that code is better
579 * - it executes in a cached space so is faster than refetch per-block
580 * - should be faster and will change with kernel
581 * - 'might' have to copy address, load and jump to it
582 * - lr is used since we are running in SRAM currently.
589 /* Data memory barrier and Data sync barrier */
591 mcr p15, 0, r1, c7, c10, 4
592 mcr p15, 0, r1, c7, c10, 5
594 wfi @ wait for interrupt
606 /* restore regs and return */
607 ldmfd sp!, {r0-r12, pc}
/*
 * SDRC-ready helpers (entry labels lost in extraction): wait for
 * DPLL3/CORE to report idle-released, then busy-wait for the SDRC
 * DLL to lock, disabling and re-enabling the DLL (bit 3 of
 * SDRC_DLLA_CTRL) if it fails to lock in time; a bump of
 * wait_dll_lock_counter records each retry.  The literal pool and the
 * function-size word for omap34xx_cpu_suspend follow.
 * NOTE(review): the compare/branch instructions of the wait loops and
 * most literal-pool labels are missing from this extraction.
 */
609 /* Make sure SDRC accesses are ok */
612 /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this. */
613 ldr r4, cm_idlest_ckgen
619 ldr r4, cm_idlest1_core
624 /* allow DLL powerdown upon hw idle req */
631 /* Is dll in lock mode? */
632 ldr r4, sdrc_dlla_ctrl
636 /* wait till dll locks */
638 ldr r4, wait_dll_lock_counter @ count this retry
640 str r4, wait_dll_lock_counter
641 ldr r4, sdrc_dlla_status
642 mov r6, #8 /* Wait 20uS for lock */
652 /* disable/reenable DLL if not locked */
654 ldr r4, sdrc_dlla_ctrl
657 bic r6, #(1<<3) /* disable dll */
660 orr r6, r6, #(1<<3) /* enable dll */
666 b wait_dll_lock_timed
/* literal pool: register addresses used throughout suspend/resume */
669 .word CM_IDLEST1_CORE_V
671 .word CM_IDLEST_CKGEN_V
673 .word SDRC_DLLA_STATUS_V
675 .word SDRC_DLLA_CTRL_V
677 .word PM_PREPWSTST_CORE_V
679 .word PM_PREPWSTST_CORE_P
681 .word PM_PREPWSTST_MPU_V
683 .word PM_PWSTCTRL_MPU_P
685 .word SCRATCHPAD_BASE_P
687 .word SRAM_BASE_P + 0x8000
702 cache_pred_disable_mask:
707 .word CONTROL_MEM_RTA_CTRL
709 .word v7_flush_dcache_all
713 * When exporting to userspace while the counters are in SRAM,
714 * these 2 words need to be at the end to facilitate retrieval!
718 wait_dll_lock_counter:
720 ENTRY(omap34xx_cpu_suspend_sz)
721 .word . - omap34xx_cpu_suspend