]> git.proxmox.com Git - mirror_ubuntu-zesty-kernel.git/blob - arch/arm/mach-omap2/sleep34xx.S
8fa9dfe813edd765544fb9368ba8ae67f3b84ff3
[mirror_ubuntu-zesty-kernel.git] / arch / arm / mach-omap2 / sleep34xx.S
1 /*
2 * linux/arch/arm/mach-omap2/sleep.S
3 *
4 * (C) Copyright 2007
5 * Texas Instruments
6 * Karthik Dasu <karthik-dp@ti.com>
7 *
8 * (C) Copyright 2004
9 * Texas Instruments, <www.ti.com>
10 * Richard Woodruff <r-woodruff2@ti.com>
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License as
14 * published by the Free Software Foundation; either version 2 of
15 * the License, or (at your option) any later version.
16 *
17 * This program is distributed in the hope that it will be useful,
18 * but WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 * GNU General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, write to the Free Software
24 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
25 * MA 02111-1307 USA
26 */
27 #include <linux/linkage.h>
28 #include <asm/assembler.h>
29 #include <mach/io.h>
30
31 #include "cm.h"
32 #include "prm.h"
33 #include "sdrc.h"
34 #include "control.h"
35
36 #define SDRC_SCRATCHPAD_SEM_V	0xfa00291c
37 
38 #define PM_PREPWSTST_CORE_P	0x48306AE8
/* Parenthesized so the summed address expands safely in any expression
 * context (it is currently only used via .word, but macro hygiene anyway). */
39 #define PM_PWSTCTRL_MPU_P	(OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL)
40 #define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
41 #define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
42 #define SRAM_BASE_P		0x40200000
43 #define CONTROL_STAT		0x480022F0
44 #define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE\
45 				+ OMAP36XX_CONTROL_MEM_RTA_CTRL)
46 #define SCRATCHPAD_MEM_OFFS	0x310 /* Move this as correct place is
47 				       * available */
48 #define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
49 				+ SCRATCHPAD_MEM_OFFS)
50 #define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
51 #define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
52 #define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
53 #define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
54 #define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
55 #define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
56 #define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
57 #define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
58 #define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
59 #define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
61
62 /*
63 * API functions
64 */
65
66 	.text
67 /* Function call to get the restore pointer for resume from OFF */
/*
 * Returns in r0 the run-time address of the 'restore' entry point.
 * adr is PC-relative, so the returned address is valid for wherever
 * this code is currently executing from (e.g. after the SRAM copy).
 */
68 ENTRY(get_restore_pointer)
69 	stmfd	sp!, {lr}	@ save registers on stack
70 	adr	r0, restore	@ r0 = address of the 'restore' label
71 	ldmfd	sp!, {pc}	@ restore regs and return
/* Exported size word: length of the function above in bytes */
72 ENTRY(get_restore_pointer_sz)
73 	.word	. - get_restore_pointer
74 	.text
75 /* Function call to get the restore pointer for 3630 resume from OFF */
/*
 * Returns in r0 the run-time address of the 3630-specific
 * 'restore_3630' entry point (PC-relative, copy-safe).
 */
76 ENTRY(get_omap3630_restore_pointer)
77 	stmfd	sp!, {lr}	@ save registers on stack
78 	adr	r0, restore_3630	@ r0 = address of 'restore_3630'
79 	ldmfd	sp!, {pc}	@ restore regs and return
/* Exported size word: length of the function above in bytes */
80 ENTRY(get_omap3630_restore_pointer_sz)
81 	.word	. - get_omap3630_restore_pointer
82
83 	.text
84 /*
85 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
86 * This function sets up a flag that will allow for this toggling to take
87 * place on 3630. Hopefully some version in the future may not need this
88 */
89 ENTRY(enable_omap3630_toggle_l2_on_restore)
90 	stmfd	sp!, {lr}	@ save registers on stack
91 	/* Setup so that we will disable and enable l2 */
92 	mov	r1, #0x1
93 	str	r1, l2dis_3630	@ PC-relative store into the l2dis_3630 word
94 	ldmfd	sp!, {pc}	@ restore regs and return
95
96 	.text
97 /* Function call to get the restore pointer for ES3 to resume from OFF */
/*
 * Returns in r0 the run-time address of the ES3-specific
 * 'restore_es3' entry point (PC-relative, copy-safe).
 */
98 ENTRY(get_es3_restore_pointer)
99 	stmfd	sp!, {lr}	@ save registers on stack
100 	adr	r0, restore_es3	@ r0 = address of 'restore_es3'
101 	ldmfd	sp!, {pc}	@ restore regs and return
/* Exported size word: length of the function above in bytes */
102 ENTRY(get_es3_restore_pointer_sz)
103 	.word	. - get_es3_restore_pointer
104
/*
 * es3_sdrc_fix: ES3 silicon SDRC fixup, run after wake from OFF
 * (copied to SRAM and called from restore_es3).  Re-writes the SDRC
 * SYSCONFIG/MR/EMR2 registers for both chip-selects and issues a
 * manual autorefresh command on each.  Uses only r4/r5; returns
 * with bx lr.  Addresses are physical (_P), loaded from the literal
 * words that follow the code.
 */
105 ENTRY(es3_sdrc_fix)
106 	ldr	r4, sdrc_syscfg		@ get config addr
107 	ldr	r5, [r4]		@ get value
108 	tst	r5, #0x100		@ is part access blocked
109 	it	eq
110 	biceq	r5, r5, #0x100		@ clear bit if set
	/*
	 * NOTE(review): tst yields EQ when bit 8 is ALREADY clear, so
	 * the biceq above is a no-op; 'bicne' would match the "clear
	 * bit if set" comment.  Confirm against the ES3 errata before
	 * changing - left as-is here.
	 */
111 	str	r5, [r4]		@ write back change
112 	ldr	r4, sdrc_mr_0		@ get config addr
113 	ldr	r5, [r4]		@ get value
114 	str	r5, [r4]		@ write back change
115 	ldr	r4, sdrc_emr2_0		@ get config addr
116 	ldr	r5, [r4]		@ get value
117 	str	r5, [r4]		@ write back change
118 	ldr	r4, sdrc_manual_0	@ get config addr
119 	mov	r5, #0x2		@ autorefresh command
120 	str	r5, [r4]		@ kick off refreshes
121 	ldr	r4, sdrc_mr_1		@ get config addr
122 	ldr	r5, [r4]		@ get value
123 	str	r5, [r4]		@ write back change
124 	ldr	r4, sdrc_emr2_1		@ get config addr
125 	ldr	r5, [r4]		@ get value
126 	str	r5, [r4]		@ write back change
127 	ldr	r4, sdrc_manual_1	@ get config addr
128 	mov	r5, #0x2		@ autorefresh command
129 	str	r5, [r4]		@ kick off refreshes
130 	bx	lr
/* Literal pool: physical SDRC register addresses used above */
131 sdrc_syscfg:
132 	.word	SDRC_SYSCONFIG_P
133 sdrc_mr_0:
134 	.word	SDRC_MR_0_P
135 sdrc_emr2_0:
136 	.word	SDRC_EMR2_0_P
137 sdrc_manual_0:
138 	.word	SDRC_MANUAL_0_P
139 sdrc_mr_1:
140 	.word	SDRC_MR_1_P
141 sdrc_emr2_1:
142 	.word	SDRC_EMR2_1_P
143 sdrc_manual_1:
144 	.word	SDRC_MANUAL_1_P
/* Exported size word (code + literals), used for the SRAM copy */
145 ENTRY(es3_sdrc_fix_sz)
146 	.word	. - es3_sdrc_fix
147
148 /* Function to call rom code to save secure ram context */
/*
 * save_secure_ram_context(r0 = SDRAM address to save the context to)
 *
 * Fills in the api_params block, converts its address into SRAM
 * physical space (low 16 bits kept, ORed with SRAM_BASE_P - assumes
 * this code is running from SRAM; see the comment below), then calls
 * secure service 25 via the SMI monitor.  The .word 0xE1600071 is the
 * hand-encoded SMI/SMC #1 instruction.  r1-r12 are saved/restored, so
 * no caller-visible clobbers.
 */
149 ENTRY(save_secure_ram_context)
150 	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
151 
152 	adr	r3, api_params		@ r3 points to parameters
153 	str	r0, [r3,#0x4]		@ r0 has sdram address
154 	ldr	r12, high_mask
155 	and	r3, r3, r12		@ keep offset bits of api_params addr
156 	ldr	r12, sram_phy_addr_mask
157 	orr	r3, r3, r12		@ rebase into SRAM physical space
158 	mov	r0, #25			@ set service ID for PPA
159 	mov	r12, r0			@ copy secure service ID in r12
160 	mov	r1, #0			@ set task id for ROM code in r1
161 	mov	r2, #4			@ set some flags in r2, r6
162 	mov	r6, #0xff
163 	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
164 	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
165 	.word	0xE1600071		@ call SMI monitor (smi #1)
166 	nop
167 	nop
168 	nop
169 	nop
170 	ldmfd	sp!, {r1-r12, pc}
/* Mask/base words used to build the physical parameter address */
171 sram_phy_addr_mask:
172 	.word	SRAM_BASE_P
173 high_mask:
174 	.word	0xffff
/* Parameter block passed to the secure service: count, sdram addr
 * (patched at run time), then fixed flag words */
175 api_params:
176 	.word	0x4, 0x0, 0x0, 0x1, 0x1
177 ENTRY(save_secure_ram_context_sz)
178 	.word	. - save_secure_ram_context
179
180 /*
181 * Forces OMAP into idle state
182 *
183 * omap34xx_suspend() - This bit of code just executes the WFI
184 * for normal idles.
185 *
186 * Note: This code gets copied to internal SRAM at boot. When the OMAP
187 * wakes up it continues execution at the point it went to sleep.
188 *
189 * In:  r0 = restore pointer in SDRAM (used by save_context_wfi)
190 *      r1 = nonzero if full context save is required before WFI
191 */
189 ENTRY(omap34xx_cpu_suspend)
190 	stmfd	sp!, {r0-r12, lr}	@ save registers on stack
191 
192 	/* r0 contains restore pointer in sdram */
193 	/* r1 contains information about saving context */
194 	ldr	r4, sdrc_power		@ read the SDRC_POWER register
195 	ldr	r5, [r4]		@ read the contents of SDRC_POWER
196 	orr	r5, r5, #0x40		@ enable self refresh on idle req
197 	str	r5, [r4]		@ write back to SDRC_POWER register
198 
199 	cmp	r1, #0x0
200 	/* If context save is required, do that and execute wfi */
201 	bne	save_context_wfi
202 	/* Data memory barrier and Data sync barrier */
203 	mov	r1, #0
204 	mcr	p15, 0, r1, c7, c10, 4
205 	mcr	p15, 0, r1, c7, c10, 5
206 
207 	wfi				@ wait for interrupt
208 
	/* nop slide after wfi, kept from the original code - presumably
	 * covers wake-up/pipeline latency; confirm before removing */
209 	nop
210 	nop
211 	nop
212 	nop
213 	nop
214 	nop
215 	nop
216 	nop
217 	nop
218 	nop
219 	bl wait_sdrc_ok			@ make sure SDRC is accessible again
220 
221 	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
/*
 * restore_es3: ES3 resume entry.  If CORE previously hit OFF, copy
 * es3_sdrc_fix into SRAM (at sram_base) and run it there before
 * falling into the common 'restore' path.
 */
222 restore_es3:
223 	ldr	r5, pm_prepwstst_core_p
224 	ldr	r4, [r5]
225 	and	r4, r4, #0x3		@ previous power state bits
226 	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
227 	bne	restore
228 	adr	r0, es3_sdrc_fix	@ src = es3_sdrc_fix (PC-relative)
229 	ldr	r1, sram_base		@ dst = SRAM scratch area
230 	ldr	r2, es3_sdrc_fix_sz	@ size in bytes
	/* size is word-aligned, so ror #2 == lsr #2: bytes -> words.
	 * NOTE(review): lsr would express the intent more clearly. */
231 	mov	r2, r2, ror #2
232 copy_to_sram:
233 	ldmia	r0!, {r3}		@ val = *src
234 	stmia	r1!, {r3}		@ *dst = val
235 	subs	r2, r2, #0x1		@ num_words--
236 	bne	copy_to_sram
237 	ldr	r1, sram_base
238 	blx	r1			@ run es3_sdrc_fix from SRAM
239 	b	restore
240
/*
 * restore_3630: 3630 resume entry.  If CORE previously hit OFF,
 * disable RTA (SRAM retention assist) before the common restore.
 */
241 restore_3630:
242 	ldr	r1, pm_prepwstst_core_p
243 	ldr	r2, [r1]
244 	and	r2, r2, #0x3		@ previous power state bits
245 	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
246 	bne	restore
247 	/* Disable RTA before giving control */
248 	ldr	r1, control_mem_rta
249 	mov	r2, #OMAP36XX_RTA_DISABLE
250 	str	r2, [r1]
251 	/* Fall thru for the remaining logic */
252 restore:
253 	/* Check what was the reason for mpu reset and store the reason in r9*/
254 	/* 1 - Only L1 and logic lost */
255 	/* 2 - Only L2 lost - In this case, we wont be here */
256 	/* 3 - Both L1 and L2 lost */
257 	ldr	r1, pm_pwstctrl_mpu
258 	ldr	r2, [r1]
259 	and	r2, r2, #0x3		@ target power state bits
260 	cmp	r2, #0x0	@ Check if target power state was OFF or RET
261 	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
262 	movne	r9, #0x1	@ Only L1 and logic lost => avoid L2 invalidation
263 	bne	logic_l1_restore
264 
	/* 3630 only: L2 must be toggled off here and back on later
	 * (see enable_omap3630_toggle_l2_on_restore) */
265 	ldr	r0, l2dis_3630
266 	cmp	r0, #0x1	@ should we disable L2 on 3630?
267 	bne	skipl2dis
268 	mrc	p15, 0, r0, c1, c0, 1
269 	bic	r0, r0, #2	@ disable L2 cache
270 	mcr	p15, 0, r0, c1, c0, 1
271 skipl2dis:
	/* Device type from CONTROL_STATUS: 0x300 presumably means GP
	 * device (ROM SMI services unavailable) - confirm against TRM */
272 	ldr	r0, control_stat
273 	ldr	r1, [r0]
274 	and	r1, #0x700
275 	cmp	r1, #0x300
276 	beq	l2_inv_gp
	/* HS/EMU device: invalidate L2 via secure service 40 */
277 	mov	r0, #40		@ set service ID for PPA
278 	mov	r12, r0		@ copy secure Service ID in r12
279 	mov	r1, #0		@ set task id for ROM code in r1
280 	mov	r2, #4		@ set some flags in r2, r6
281 	mov	r6, #0xff
282 	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
283 	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
284 	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
285 	.word	0xE1600071	@ call SMI monitor (smi #1)
286 	/* Write to Aux control register to set some bits */
287 	mov	r0, #42		@ set service ID for PPA
288 	mov	r12, r0		@ copy secure Service ID in r12
289 	mov	r1, #0		@ set task id for ROM code in r1
290 	mov	r2, #4		@ set some flags in r2, r6
291 	mov	r6, #0xff
292 	ldr	r4, scratchpad_base
293 	ldr	r3, [r4, #0xBC]	@ r3 points to parameters
294 	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
295 	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
296 	.word	0xE1600071	@ call SMI monitor (smi #1)
297 
298 #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
299 	/* Restore L2 aux control register */
300 	@ set service ID for PPA
301 	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
302 	mov	r12, r0		@ copy service ID in r12
303 	mov	r1, #0		@ set task ID for ROM code in r1
304 	mov	r2, #4		@ set some flags in r2, r6
305 	mov	r6, #0xff
306 	ldr	r4, scratchpad_base
307 	ldr	r3, [r4, #0xBC]
308 	adds	r3, r3, #8	@ r3 points to parameters
309 	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
310 	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
311 	.word	0xE1600071	@ call SMI monitor (smi #1)
312 #endif
313 	b	logic_l1_restore
/* Dummy parameter block for the secure L2-invalidate service */
314 l2_inv_api_params:
315 	.word	0x1, 0x00
/* GP device path: use plain SMI (smieq) instead of secure services */
316 l2_inv_gp:
317 	/* Execute smi to invalidate L2 cache */
318 	mov	r12, #0x1	@ set up to invalidate L2
319 smi:	.word	0xE1600070	@ Call SMI monitor (smieq)
320 	/* Write to Aux control register to set some bits */
321 	ldr	r4, scratchpad_base
322 	ldr	r3, [r4,#0xBC]
323 	ldr	r0, [r3,#4]	@ saved AUX ctrl value from scratchpad
324 	mov	r12, #0x3
325 	.word	0xE1600070	@ Call SMI monitor (smieq)
326 	ldr	r4, scratchpad_base
327 	ldr	r3, [r4,#0xBC]
328 	ldr	r0, [r3,#12]	@ saved L2 AUX ctrl value from scratchpad
329 	mov	r12, #0x2
330 	.word	0xE1600070	@ Call SMI monitor (smieq)
/*
 * logic_l1_restore: restore CPU logic/L1 context saved by
 * save_context_wfi.  Reads the context block whose address is held at
 * scratchpad_base+0xBC, restores banked regs and CP15 state, then
 * re-enables the MMU via a temporary 1:1 section mapping of the
 * currently-executing code, and returns to the suspend caller.
 */
331 logic_l1_restore:
332 	ldr	r1, l2dis_3630
333 	cmp	r1, #0x1	@ Do we need to re-enable L2 on 3630?
334 	bne	skipl2reen
335 	mrc	p15, 0, r1, c1, c0, 1
336 	orr	r1, r1, #2	@ re-enable L2 cache
337 	mcr	p15, 0, r1, c1, c0, 1
338 skipl2reen:
339 	mov	r1, #0
340 	/* Invalidate all instruction caches to PoU
341 	* and flush branch target cache */
342 	mcr	p15, 0, r1, c7, c5, 0
343 
344 	ldr	r4, scratchpad_base
345 	ldr	r3, [r4,#0xBC]	@ r3 = saved context block in SDRAM
346 	adds	r3, r3, #16	@ skip the 16 bytes of restore parameters
347 	ldmia	r3!, {r4-r6}
348 	mov	sp, r4		@ restore stack pointer
349 	msr	spsr_cxsf, r5	@ restore saved program status
350 	mov	lr, r6		@ restore link register
351 
352 	ldmia	r3!, {r4-r9}
353 	/* Coprocessor access Control Register */
354 	mcr	p15, 0, r4, c1, c0, 2
355 
356 	/* TTBR0 */
357 	MCR	p15, 0, r5, c2, c0, 0
358 	/* TTBR1 */
359 	MCR	p15, 0, r6, c2, c0, 1
360 	/* Translation table base control register */
361 	MCR	p15, 0, r7, c2, c0, 2
362 	/*domain access Control Register */
363 	MCR	p15, 0, r8, c3, c0, 0
364 	/* data fault status Register */
365 	MCR	p15, 0, r9, c5, c0, 0
366 
367 	ldmia	r3!,{r4-r8}
368 	/* instruction fault status Register */
369 	MCR	p15, 0, r4, c5, c0, 1
370 	/*Data Auxiliary Fault Status Register */
371 	MCR	p15, 0, r5, c5, c1, 0
372 	/*Instruction Auxiliary Fault Status Register*/
373 	MCR	p15, 0, r6, c5, c1, 1
374 	/*Data Fault Address Register */
375 	MCR	p15, 0, r7, c6, c0, 0
376 	/*Instruction Fault Address Register*/
377 	MCR	p15, 0, r8, c6, c0, 2
378 	ldmia	r3!,{r4-r7}
379 
380 	/* user r/w thread and process ID */
381 	MCR	p15, 0, r4, c13, c0, 2
382 	/* user ro thread and process ID */
383 	MCR	p15, 0, r5, c13, c0, 3
384 	/*Privileged only thread and process ID */
385 	MCR	p15, 0, r6, c13, c0, 4
386 	/* cache size selection */
387 	MCR	p15, 2, r7, c0, c0, 0
388 	ldmia	r3!,{r4-r8}
389 	/* Data TLB lockdown registers */
390 	MCR	p15, 0, r4, c10, c0, 0
391 	/* Instruction TLB lockdown registers */
392 	MCR	p15, 0, r5, c10, c0, 1
393 	/* Secure or Nonsecure Vector Base Address */
394 	MCR	p15, 0, r6, c12, c0, 0
395 	/* FCSE PID */
396 	MCR	p15, 0, r7, c13, c0, 0
397 	/* Context PID */
398 	MCR	p15, 0, r8, c13, c0, 1
399 
400 	ldmia	r3!,{r4-r5}
401 	/* primary memory remap register */
402 	MCR	p15, 0, r4, c10, c2, 0
403 	/*normal memory remap register */
404 	MCR	p15, 0, r5, c10, c2, 1
405 
406 	/* Restore cpsr */
407 	ldmia	r3!,{r4}	/*load CPSR from SDRAM*/
408 	msr	cpsr, r4	/*store cpsr */
409 
410 	/* Enabling MMU here */
411 	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBRControl */
412 	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
413 	and	r7, #0x7
414 	cmp	r7, #0x0
415 	beq	usettbr0
416 ttbr_error:
417 	/* More work needs to be done to support N[0:2] value other than 0
418 	* So looping here so that the error can be detected
419 	*/
420 	b	ttbr_error
421 usettbr0:
	/* Build a 1:1 section mapping for the page containing PC so
	 * execution survives the moment the MMU comes back on */
422 	mrc	p15, 0, r2, c2, c0, 0
423 	ldr	r5, ttbrbit_mask
424 	and	r2, r5		/* r2 = translation table base (TTBR0) */
425 	mov	r4, pc
426 	ldr	r5, table_index_mask
427 	and	r4, r5	/* r4 = 31 to 20 bits of pc */
428 	/* Extract the value to be written to table entry */
429 	ldr	r1, table_entry		/* section attrs: 0x00000C02 */
430 	add	r1, r1, r4 /* r1 has value to be written to table entry*/
431 	/* Getting the address of table entry to modify */
432 	lsr	r4, #18		/* section index * 4 = entry offset */
433 	add	r2, r4 /* r2 has the location which needs to be modified */
434 	/* Storing previous entry of location being modified */
435 	ldr	r5, scratchpad_base
436 	ldr	r4, [r2]
437 	str	r4, [r5, #0xC0]	/* old entry -> scratchpad+0xC0 */
438 	/* Modify the table entry */
439 	str	r1, [r2]
440 	/* Storing address of entry being modified
441 	* - will be restored after enabling MMU */
442 	ldr	r5, scratchpad_base
443 	str	r2, [r5, #0xC4]	/* entry address -> scratchpad+0xC4 */
444 
445 	mov	r0, #0
446 	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
447 	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
448 	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
449 	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
450 	/* Restore control register but dont enable caches here*/
451 	/* Caches will be enabled after restoring MMU table entry */
452 	ldmia	r3!, {r4}
453 	/* Store previous value of control register in scratchpad */
454 	str	r4, [r5, #0xC8]
455 	ldr	r2, cache_pred_disable_mask
456 	and	r4, r2		/* mask out cache/branch-predict enables */
457 	mcr	p15, 0, r4, c1, c0, 0	/* MMU on, caches still off */
458 
459 	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
/*
 * save_context_wfi: save CPU logic/L1 context to SDRAM (r8 walks the
 * buffer whose start came in via r0), optionally clean L2 depending
 * on the target state in r1, then barrier + WFI.  The layout written
 * here is exactly what logic_l1_restore reads back.
 */
460 save_context_wfi:
461 	mov	r8, r0 /* Store SDRAM address in r8 */
462 	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
463 	mov	r4, #0x1		@ Number of parameters for restore call
464 	stmia	r8!, {r4-r5}		@ Push parameters for restore call
465 	mrc	p15, 1, r5, c9, c0, 2	@ Read L2 AUX ctrl register
466 	stmia	r8!, {r4-r5}		@ Push parameters for restore call
467 	/* Check what that target sleep state is:stored in r1*/
468 	/* 1 - Only L1 and logic lost */
469 	/* 2 - Only L2 lost */
470 	/* 3 - Both L1 and L2 lost */
471 	cmp	r1, #0x2 /* Only L2 lost */
472 	beq	clean_l2
473 	cmp	r1, #0x1 /* L2 retained */
474 	/* r9 stores whether to clean L2 or not*/
475 	moveq	r9, #0x0 /* Dont Clean L2 */
476 	movne	r9, #0x1 /* Clean L2 */
477 l1_logic_lost:
478 	/* Store sp and spsr to SDRAM */
479 	mov	r4, sp
480 	mrs	r5, spsr
481 	mov	r6, lr
482 	stmia	r8!, {r4-r6}
483 	/* Save all ARM registers */
484 	/* Coprocessor access control register */
485 	mrc	p15, 0, r6, c1, c0, 2
486 	stmia	r8!, {r6}
487 	/* TTBR0, TTBR1 and Translation table base control */
488 	mrc	p15, 0, r4, c2, c0, 0
489 	mrc	p15, 0, r5, c2, c0, 1
490 	mrc	p15, 0, r6, c2, c0, 2
491 	stmia	r8!, {r4-r6}
492 	/* Domain access control register, data fault status register,
493 	and instruction fault status register */
494 	mrc	p15, 0, r4, c3, c0, 0
495 	mrc	p15, 0, r5, c5, c0, 0
496 	mrc	p15, 0, r6, c5, c0, 1
497 	stmia	r8!, {r4-r6}
498 	/* Data aux fault status register, instruction aux fault status,
499 	data fault address register and instruction fault address register*/
500 	mrc	p15, 0, r4, c5, c1, 0
501 	mrc	p15, 0, r5, c5, c1, 1
502 	mrc	p15, 0, r6, c6, c0, 0
503 	mrc	p15, 0, r7, c6, c0, 2
504 	stmia	r8!, {r4-r7}
505 	/* user r/w thread and process ID, user r/o thread and process ID,
506 	priv only thread and process ID, cache size selection */
507 	mrc	p15, 0, r4, c13, c0, 2
508 	mrc	p15, 0, r5, c13, c0, 3
509 	mrc	p15, 0, r6, c13, c0, 4
510 	mrc	p15, 2, r7, c0, c0, 0
511 	stmia	r8!, {r4-r7}
512 	/* Data TLB lockdown, instruction TLB lockdown registers */
513 	mrc	p15, 0, r5, c10, c0, 0
514 	mrc	p15, 0, r6, c10, c0, 1
515 	stmia	r8!, {r5-r6}
516 	/* Secure or non secure vector base address, FCSE PID, Context PID*/
517 	mrc	p15, 0, r4, c12, c0, 0
518 	mrc	p15, 0, r5, c13, c0, 0
519 	mrc	p15, 0, r6, c13, c0, 1
520 	stmia	r8!, {r4-r6}
521 	/* Primary remap, normal remap registers */
522 	mrc	p15, 0, r4, c10, c2, 0
523 	mrc	p15, 0, r5, c10, c2, 1
524 	stmia	r8!,{r4-r5}
525 
526 	/* Store current cpsr*/
527 	mrs	r2, cpsr
528 	stmia	r8!, {r2}
529 
530 	mrc	p15, 0, r4, c1, c0, 0
531 	/* save control register */
532 	stmia	r8!, {r4}
533 clean_caches:
534 	/* Clean Data or unified cache to POU*/
535 	/* How to invalidate only L1 cache???? - #FIX_ME# */
536 	/* mcr p15, 0, r11, c7, c11, 1 */
537 	cmp	r9, #1 /* Check whether L2 inval is required or not*/
538 	bne	skip_l2_inval
539 clean_l2:
540 	/*
541 	 * Jump out to kernel flush routine
542 	 *  - reuse that code is better
543 	 *  - it executes in a cached space so is faster than refetch per-block
544 	 *  - should be faster and will change with kernel
545 	 *  - 'might' have to copy address, load and jump to it
546 	 *  - lr is used since we are running in SRAM currently.
547 	 */
548 	ldr	r1, kernel_flush
549 	mov	lr, pc		@ manual link: lr = return address
550 	bx	r1		@ call v7_flush_dcache_all
551 
552 skip_l2_inval:
553 	/* Data memory barrier and Data sync barrier */
554 	mov	r1, #0
555 	mcr	p15, 0, r1, c7, c10, 4
556 	mcr	p15, 0, r1, c7, c10, 5
557 
558 	wfi				@ wait for interrupt
	/* nop slide after wfi, kept from the original code */
559 	nop
560 	nop
561 	nop
562 	nop
563 	nop
564 	nop
565 	nop
566 	nop
567 	nop
568 	nop
569 	bl wait_sdrc_ok			@ make sure SDRC is accessible again
570 	/* restore regs and return */
571 	ldmfd	sp!, {r0-r12, pc}
572
573 /* Make sure SDRC accesses are ok */
/*
 * wait_sdrc_ok: spin until DPLL3 is locked and the SDRC is idle,
 * allow DLL powerdown on hw idle, and make sure the SDRC DLL is
 * locked before returning (kicking it off/on if it fails to lock).
 * Clobbers r4-r6 and flags; returns via bx lr.
 */
574 wait_sdrc_ok:
575 
576 /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this. */
577 	ldr	r4, cm_idlest_ckgen
578 wait_dpll3_lock:
579 	ldr	r5, [r4]
580 	tst	r5, #1		@ bit 0: DPLL3 locked
581 	beq	wait_dpll3_lock
582 
583 	ldr	r4, cm_idlest1_core
584 wait_sdrc_ready:
585 	ldr	r5, [r4]
586 	tst	r5, #0x2	@ bit 1: SDRC still busy
587 	bne	wait_sdrc_ready
588 	/* allow DLL powerdown upon hw idle req */
589 	ldr	r4, sdrc_power
590 	ldr	r5, [r4]
591 	bic	r5, r5, #0x40	@ clear the self-refresh-on-idle bit
592 	str	r5, [r4]
593 is_dll_in_lock_mode:
594 
595 	/* Is dll in lock mode? */
596 	ldr	r4, sdrc_dlla_ctrl
597 	ldr	r5, [r4]
598 	tst	r5, #0x4	@ DLL in fixed-delay (not lock) mode?
599 	bxne	lr		@ yes: nothing to wait for, return
600 	/* wait till dll locks */
601 wait_dll_lock_timed:
	/* bump the diagnostic counter (exported at end of this blob) */
602 	ldr	r4, wait_dll_lock_counter
603 	add	r4, r4, #1
604 	str	r4, wait_dll_lock_counter
605 	ldr	r4, sdrc_dlla_status
606 	mov	r6, #8		/* Wait 20uS for lock */
607 wait_dll_lock:
608 	subs	r6, r6, #0x1
609 	beq	kick_dll	@ timed out: pulse the DLL and retry
610 	ldr	r5, [r4]
611 	and	r5, r5, #0x4
612 	cmp	r5, #0x4	@ lock status bit set?
613 	bne	wait_dll_lock
614 	bx	lr		@ locked: done
615 
616 /* disable/reenable DLL if not locked */
617 kick_dll:
618 	ldr	r4, sdrc_dlla_ctrl
619 	ldr	r5, [r4]
620 	mov	r6, r5
621 	bic	r6, #(1<<3)	/* disable dll */
622 	str	r6, [r4]
623 	dsb
624 	orr	r6, r6, #(1<<3)	/* enable dll */
625 	str	r6, [r4]
626 	dsb
	/* bump the kick diagnostic counter, then retry the lock wait */
627 	ldr	r4, kick_counter
628 	add	r4, r4, #1
629 	str	r4, kick_counter
630 	b	wait_dll_lock_timed
631
/*
 * Literal pool for omap34xx_cpu_suspend: register addresses (virtual
 * _V or physical _P as named), masks and scratch words referenced
 * PC-relative by the code above.  This pool is copied to SRAM along
 * with the code, so the stores into l2dis_3630 and the two counters
 * land in the SRAM copy.
 */
632 cm_idlest1_core:
633 	.word	CM_IDLEST1_CORE_V
634 cm_idlest_ckgen:
635 	.word	CM_IDLEST_CKGEN_V
636 sdrc_dlla_status:
637 	.word	SDRC_DLLA_STATUS_V
638 sdrc_dlla_ctrl:
639 	.word	SDRC_DLLA_CTRL_V
640 pm_prepwstst_core_p:
641 	.word	PM_PREPWSTST_CORE_P
642 pm_pwstctrl_mpu:
643 	.word	PM_PWSTCTRL_MPU_P
644 scratchpad_base:
645 	.word	SCRATCHPAD_BASE_P
646 sram_base:
647 	.word	SRAM_BASE_P + 0x8000
648 sdrc_power:
649 	.word	SDRC_POWER_V
650 ttbrbit_mask:
651 	.word	0xFFFFC000
652 table_index_mask:
653 	.word	0xFFF00000
654 table_entry:
655 	.word	0x00000C02
656 cache_pred_disable_mask:
657 	.word	0xFFFFE7FB
658 control_stat:
659 	.word	CONTROL_STAT
660 control_mem_rta:
661 	.word	CONTROL_MEM_RTA_CTRL
662 kernel_flush:
663 	.word	v7_flush_dcache_all
/* 3630 L2-toggle flag, set by enable_omap3630_toggle_l2_on_restore */
664 l2dis_3630:
665 	.word	0
666 /*
667 * When exporting to userspace while the counters are in SRAM,
668 * these 2 words need to be at the end to facilitate retrieval!
669 */
670 kick_counter:
671 	.word	0
672 wait_dll_lock_counter:
673 	.word	0
/* Total size of the suspend blob (code + literals) for the SRAM copy */
674 ENTRY(omap34xx_cpu_suspend_sz)
675 	.word	. - omap34xx_cpu_suspend