/*
 * linux/arch/arm/mach-omap2/sleep.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <plat/sram.h>
#include <mach/io.h>

#include "cm.h"
#include "prm.h"
#include "sdrc.h"
#include "control.h"

#define SDRC_SCRATCHPAD_SEM_V 0xfa00291c

#define PM_PREPWSTST_CORE_P 0x48306AE8
#define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define CM_IDLEST_CKGEN_V OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
#define SRAM_BASE_P 0x40200000
#define CONTROL_STAT 0x480022F0
#define CONTROL_MEM_RTA_CTRL (OMAP343X_CTRL_BASE\
 + OMAP36XX_CONTROL_MEM_RTA_CTRL)
#define SCRATCHPAD_MEM_OFFS 0x310 /* Move this once a correct place is
 * available */
#define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
 + SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)


/*
 * API functions
 */

 .text
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
 stmfd sp!, {lr} @ save registers on stack
 adr r0, restore
 ldmfd sp!, {pc} @ restore regs and return
ENTRY(get_restore_pointer_sz)
 .word . - get_restore_pointer
 .text
/* Function call to get the restore pointer for 3630 resume from OFF */
ENTRY(get_omap3630_restore_pointer)
 stmfd sp!, {lr} @ save registers on stack
 adr r0, restore_3630
 ldmfd sp!, {pc} @ restore regs and return
ENTRY(get_omap3630_restore_pointer_sz)
 .word . - get_omap3630_restore_pointer

 .text
/*
 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 * This function sets up a flag that will allow for this toggling to take
 * place on 3630. Hopefully some version in the future may not need this.
 */
ENTRY(enable_omap3630_toggle_l2_on_restore)
 stmfd sp!, {lr} @ save registers on stack
 /* Setup so that we will disable and enable l2 */
 mov r1, #0x1
 str r1, l2dis_3630
 ldmfd sp!, {pc} @ restore regs and return

 .text
/* Function call to get the restore pointer for ES3 to resume from OFF */
ENTRY(get_es3_restore_pointer)
 stmfd sp!, {lr} @ save registers on stack
 adr r0, restore_es3
 ldmfd sp!, {pc} @ restore regs and return
ENTRY(get_es3_restore_pointer_sz)
 .word . - get_es3_restore_pointer

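/*
 * es3_sdrc_fix: re-initialize the SDRC when resuming from OFF on ES3.x
 * parts. restore_es3 copies this block into SRAM and runs it from there
 * (so SDRAM need not be fetched from while the SDRC is reprogrammed);
 * it rewrites SDRC_SYSCONFIG and the MR/EMR2 registers of both chip
 * selects, then issues a manual autorefresh command to each.
 */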
ENTRY(es3_sdrc_fix)
 ldr r4, sdrc_syscfg @ get config addr
 ldr r5, [r4] @ get value
 tst r5, #0x100 @ is part access blocked
 it eq
 biceq r5, r5, #0x100 @ clear bit if set
 str r5, [r4] @ write back change
 ldr r4, sdrc_mr_0 @ get config addr
 ldr r5, [r4] @ get value
 str r5, [r4] @ write back change
 ldr r4, sdrc_emr2_0 @ get config addr
 ldr r5, [r4] @ get value
 str r5, [r4] @ write back change
 ldr r4, sdrc_manual_0 @ get config addr
 mov r5, #0x2 @ autorefresh command
 str r5, [r4] @ kick off refreshes
 ldr r4, sdrc_mr_1 @ get config addr
 ldr r5, [r4] @ get value
 str r5, [r4] @ write back change
 ldr r4, sdrc_emr2_1 @ get config addr
 ldr r5, [r4] @ get value
 str r5, [r4] @ write back change
 ldr r4, sdrc_manual_1 @ get config addr
 mov r5, #0x2 @ autorefresh command
 str r5, [r4] @ kick off refreshes
 bx lr
sdrc_syscfg:
 .word SDRC_SYSCONFIG_P
sdrc_mr_0:
 .word SDRC_MR_0_P
sdrc_emr2_0:
 .word SDRC_EMR2_0_P
sdrc_manual_0:
 .word SDRC_MANUAL_0_P
sdrc_mr_1:
 .word SDRC_MR_1_P
sdrc_emr2_1:
 .word SDRC_EMR2_1_P
sdrc_manual_1:
 .word SDRC_MANUAL_1_P
ENTRY(es3_sdrc_fix_sz)
 .word . - es3_sdrc_fix

/* Function to call rom code to save secure ram context */
ENTRY(save_secure_ram_context)
 stmfd sp!, {r1-r12, lr} @ save registers on stack

 adr r3, api_params @ r3 points to parameters
 str r0, [r3,#0x4] @ r0 has sdram address
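 /*
  * api_params is addressed PC-relative, so when this routine runs
  * from its copy in SRAM the address in r3 is an SRAM virtual one.
  * Convert it to the SRAM physical alias (keep the low 16 bits, OR
  * in the SRAM physical base) before handing it to the SMI call.
  */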
 ldr r12, high_mask
 and r3, r3, r12
 ldr r12, sram_phy_addr_mask
 orr r3, r3, r12
 mov r0, #25 @ set service ID for PPA
 mov r12, r0 @ copy secure service ID in r12
 mov r1, #0 @ set task id for ROM code in r1
 mov r2, #4 @ set some flags in r2, r6
 mov r6, #0xff
 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
 .word 0xE1600071 @ call SMI monitor (smi #1)
 nop
 nop
 nop
 nop
 ldmfd sp!, {r1-r12, pc}
sram_phy_addr_mask:
 .word SRAM_BASE_P
high_mask:
 .word 0xffff
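 /*
  * PPA parameter block: the first word is the argument count (4) and
  * the second is patched at run time with the SDRAM buffer address
  * (see the str above); the remaining words are fixed arguments for
  * the secure service.
  */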
api_params:
 .word 0x4, 0x0, 0x0, 0x1, 0x1
ENTRY(save_secure_ram_context_sz)
 .word . - save_secure_ram_context

/*
 * Forces OMAP into idle state
 *
 * omap34xx_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot. When the OMAP
 * wakes up it continues execution at the point it went to sleep.
 */
ENTRY(omap34xx_cpu_suspend)
 stmfd sp!, {r0-r12, lr} @ save registers on stack

 /* r0 contains restore pointer in sdram */
 /* r1 contains information about saving context */
 ldr r4, sdrc_power @ read the SDRC_POWER register
 ldr r5, [r4] @ read the contents of SDRC_POWER
 orr r5, r5, #0x40 @ enable self refresh on idle req
 str r5, [r4] @ write back to SDRC_POWER register

 cmp r1, #0x0
 /* If context save is required, do that and execute wfi */
 bne save_context_wfi
 /* Data memory barrier and Data sync barrier */
 mov r1, #0
 mcr p15, 0, r1, c7, c10, 4
 mcr p15, 0, r1, c7, c10, 5

 wfi @ wait for interrupt

 nop
 nop
 nop
 nop
 nop
 nop
 nop
 nop
 nop
 nop
 bl wait_sdrc_ok

 ldmfd sp!, {r0-r12, pc} @ restore regs and return
restore_es3:
 ldr r5, pm_prepwstst_core_p
 ldr r4, [r5]
 and r4, r4, #0x3
 cmp r4, #0x0 @ Check if previous power state of CORE is OFF
 bne restore
 adr r0, es3_sdrc_fix
 ldr r1, sram_base
 ldr r2, es3_sdrc_fix_sz
 mov r2, r2, ror #2
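 /*
  * Copy es3_sdrc_fix into SRAM so it can run without touching SDRAM.
  * es3_sdrc_fix_sz is a byte count; since it is word-aligned, the
  * "ror #2" converts it into the number of words to copy.
  */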
copy_to_sram:
 ldmia r0!, {r3} @ val = *src
 stmia r1!, {r3} @ *dst = val
 subs r2, r2, #0x1 @ num_words--
 bne copy_to_sram
 ldr r1, sram_base
 blx r1
 b restore

restore_3630:
 ldr r1, pm_prepwstst_core_p
 ldr r2, [r1]
 and r2, r2, #0x3
 cmp r2, #0x0 @ Check if previous power state of CORE is OFF
 bne restore
 /* Disable RTA before giving control */
 ldr r1, control_mem_rta
 mov r2, #OMAP36XX_RTA_DISABLE
 str r2, [r1]
 /* Fall through for the remaining logic */
restore:
 /* Check what was the reason for mpu reset and store the reason in r9 */
 /* 1 - Only L1 and logic lost */
 /* 2 - Only L2 lost - In this case, we won't be here */
 /* 3 - Both L1 and L2 lost */
 ldr r1, pm_pwstctrl_mpu
 ldr r2, [r1]
 and r2, r2, #0x3
 cmp r2, #0x0 @ Check if target power state was OFF or RET
 moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
 movne r9, #0x1 @ Only L1 and logic lost => avoid L2 invalidation
 bne logic_l1_restore

 ldr r0, l2dis_3630
 cmp r0, #0x1 @ should we disable L2 on 3630?
 bne skipl2dis
 mrc p15, 0, r0, c1, c0, 1
 bic r0, r0, #2 @ disable L2 cache
 mcr p15, 0, r0, c1, c0, 1
skipl2dis:
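 /*
  * CONTROL_STATUS bits [10:8] hold the device type: GP (0x3) devices
  * have no PPA services, so take the plain ROM SMI path to invalidate
  * L2; HS/EMU devices fall through to the PPA service calls below.
  */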
 ldr r0, control_stat
 ldr r1, [r0]
 and r1, #0x700
 cmp r1, #0x300
 beq l2_inv_gp
 mov r0, #40 @ set service ID for PPA
 mov r12, r0 @ copy secure Service ID in r12
 mov r1, #0 @ set task id for ROM code in r1
 mov r2, #4 @ set some flags in r2, r6
 mov r6, #0xff
 adr r3, l2_inv_api_params @ r3 points to dummy parameters
 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
 .word 0xE1600071 @ call SMI monitor (smi #1)
 /* Write to Aux control register to set some bits */
 mov r0, #42 @ set service ID for PPA
 mov r12, r0 @ copy secure Service ID in r12
 mov r1, #0 @ set task id for ROM code in r1
 mov r2, #4 @ set some flags in r2, r6
 mov r6, #0xff
 ldr r4, scratchpad_base
 ldr r3, [r4, #0xBC] @ r3 points to parameters
 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
 .word 0xE1600071 @ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
 /* Restore L2 aux control register */
 @ set service ID for PPA
 mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
 mov r12, r0 @ copy service ID in r12
 mov r1, #0 @ set task ID for ROM code in r1
 mov r2, #4 @ set some flags in r2, r6
 mov r6, #0xff
 ldr r4, scratchpad_base
 ldr r3, [r4, #0xBC]
 adds r3, r3, #8 @ r3 points to parameters
 mcr p15, 0, r0, c7, c10, 4 @ data write barrier
 mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
 .word 0xE1600071 @ call SMI monitor (smi #1)
#endif
 b logic_l1_restore
l2_inv_api_params:
 .word 0x1, 0x00
l2_inv_gp:
 /* Execute smi to invalidate L2 cache */
 mov r12, #0x1 @ set up to invalidate L2
smi: .word 0xE1600070 @ Call SMI monitor (smieq)
 /* Write to Aux control register to set some bits */
 ldr r4, scratchpad_base
 ldr r3, [r4,#0xBC]
 ldr r0, [r3,#4]
 mov r12, #0x3
 .word 0xE1600070 @ Call SMI monitor (smieq)
 ldr r4, scratchpad_base
 ldr r3, [r4,#0xBC]
 ldr r0, [r3,#12]
 mov r12, #0x2
 .word 0xE1600070 @ Call SMI monitor (smieq)
logic_l1_restore:
 ldr r1, l2dis_3630
 cmp r1, #0x1 @ Do we need to re-enable L2 on 3630?
 bne skipl2reen
 mrc p15, 0, r1, c1, c0, 1
 orr r1, r1, #2 @ re-enable L2 cache
 mcr p15, 0, r1, c1, c0, 1
skipl2reen:
 mov r1, #0
 /* Invalidate all instruction caches to PoU
  * and flush branch target cache */
 mcr p15, 0, r1, c7, c5, 0

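 /*
  * The word at scratchpad offset 0xBC holds a pointer to the CPU
  * context area in SDRAM (filled in by the platform PM code). Restore
  * sp/spsr/lr and then the CP15 registers from it, in the same order
  * that save_context_wfi stored them.
  */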
 ldr r4, scratchpad_base
 ldr r3, [r4,#0xBC]
 adds r3, r3, #16
 ldmia r3!, {r4-r6}
 mov sp, r4
 msr spsr_cxsf, r5
 mov lr, r6

 ldmia r3!, {r4-r9}
 /* Coprocessor access Control Register */
 mcr p15, 0, r4, c1, c0, 2

 /* TTBR0 */
 MCR p15, 0, r5, c2, c0, 0
 /* TTBR1 */
 MCR p15, 0, r6, c2, c0, 1
 /* Translation table base control register */
 MCR p15, 0, r7, c2, c0, 2
 /* Domain access Control Register */
 MCR p15, 0, r8, c3, c0, 0
 /* Data fault status Register */
 MCR p15, 0, r9, c5, c0, 0

 ldmia r3!, {r4-r8}
 /* Instruction fault status Register */
 MCR p15, 0, r4, c5, c0, 1
 /* Data Auxiliary Fault Status Register */
 MCR p15, 0, r5, c5, c1, 0
 /* Instruction Auxiliary Fault Status Register */
 MCR p15, 0, r6, c5, c1, 1
 /* Data Fault Address Register */
 MCR p15, 0, r7, c6, c0, 0
 /* Instruction Fault Address Register */
 MCR p15, 0, r8, c6, c0, 2
 ldmia r3!, {r4-r7}

 /* user r/w thread and process ID */
 MCR p15, 0, r4, c13, c0, 2
 /* user ro thread and process ID */
 MCR p15, 0, r5, c13, c0, 3
 /* Privileged only thread and process ID */
 MCR p15, 0, r6, c13, c0, 4
 /* cache size selection */
 MCR p15, 2, r7, c0, c0, 0
 ldmia r3!, {r4-r8}
 /* Data TLB lockdown registers */
 MCR p15, 0, r4, c10, c0, 0
 /* Instruction TLB lockdown registers */
 MCR p15, 0, r5, c10, c0, 1
 /* Secure or Nonsecure Vector Base Address */
 MCR p15, 0, r6, c12, c0, 0
 /* FCSE PID */
 MCR p15, 0, r7, c13, c0, 0
 /* Context PID */
 MCR p15, 0, r8, c13, c0, 1

 ldmia r3!, {r4-r5}
 /* primary memory remap register */
 MCR p15, 0, r4, c10, c2, 0
 /* normal memory remap register */
 MCR p15, 0, r5, c10, c2, 1

 /* Restore cpsr */
 ldmia r3!, {r4} /* load CPSR from SDRAM */
 msr cpsr, r4 /* store cpsr */

 /* Enabling MMU here */
 mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */
 /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
 and r7, #0x7
 cmp r7, #0x0
 beq usettbr0
ttbr_error:
 /* More work needs to be done to support N[0:2] value other than 0
  * So looping here so that the error can be detected
  */
 b ttbr_error
usettbr0:
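 /*
  * Build a temporary 1:1 section mapping for the current (physical)
  * PC so execution can continue across the MMU enable. The previous
  * table entry and its address are stashed in the scratchpad (offsets
  * 0xC0/0xC4) so they can be put back once the MMU is on.
  */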
 mrc p15, 0, r2, c2, c0, 0
 ldr r5, ttbrbit_mask
 and r2, r5
 mov r4, pc
 ldr r5, table_index_mask
 and r4, r5 /* r4 = 31 to 20 bits of pc */
 /* Extract the value to be written to table entry */
 ldr r1, table_entry
 add r1, r1, r4 /* r1 has value to be written to table entry */
 /* Getting the address of table entry to modify */
 lsr r4, #18
 add r2, r4 /* r2 has the location which needs to be modified */
 /* Storing previous entry of location being modified */
 ldr r5, scratchpad_base
 ldr r4, [r2]
 str r4, [r5, #0xC0]
 /* Modify the table entry */
 str r1, [r2]
 /* Storing address of entry being modified
  * - will be restored after enabling MMU */
 ldr r5, scratchpad_base
 str r2, [r5, #0xC4]

 mov r0, #0
 mcr p15, 0, r0, c7, c5, 4 @ Flush prefetch buffer
 mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
 mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
 mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
 /* Restore control register, but don't enable caches here */
 /* Caches will be enabled after restoring MMU table entry */
 ldmia r3!, {r4}
 /* Store previous value of control register in scratchpad */
 str r4, [r5, #0xC8]
 ldr r2, cache_pred_disable_mask
 and r4, r2
 mcr p15, 0, r4, c1, c0, 0

 ldmfd sp!, {r0-r12, pc} @ restore regs and return
save_context_wfi:
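 /*
  * Write the CPU context to the SDRAM buffer passed in r0. The layout
  * must match what the restore path expects: two parameter pairs for
  * the secure-side AUX/L2-AUX restore calls, then sp/spsr/lr, then the
  * CP15 registers in the order logic_l1_restore reloads them.
  */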
 mov r8, r0 /* Store SDRAM address in r8 */
 mrc p15, 0, r5, c1, c0, 1 @ Read Auxiliary Control Register
 mov r4, #0x1 @ Number of parameters for restore call
 stmia r8!, {r4-r5} @ Push parameters for restore call
 mrc p15, 1, r5, c9, c0, 2 @ Read L2 AUX ctrl register
 stmia r8!, {r4-r5} @ Push parameters for restore call
 /* Check what the target sleep state is: stored in r1 */
 /* 1 - Only L1 and logic lost */
 /* 2 - Only L2 lost */
 /* 3 - Both L1 and L2 lost */
 cmp r1, #0x2 /* Only L2 lost */
 beq clean_l2
 cmp r1, #0x1 /* L2 retained */
 /* r9 stores whether to clean L2 or not */
 moveq r9, #0x0 /* Don't clean L2 */
 movne r9, #0x1 /* Clean L2 */
l1_logic_lost:
 /* Store sp and spsr to SDRAM */
 mov r4, sp
 mrs r5, spsr
 mov r6, lr
 stmia r8!, {r4-r6}
 /* Save all ARM registers */
 /* Coprocessor access control register */
 mrc p15, 0, r6, c1, c0, 2
 stmia r8!, {r6}
 /* TTBR0, TTBR1 and Translation table base control */
 mrc p15, 0, r4, c2, c0, 0
 mrc p15, 0, r5, c2, c0, 1
 mrc p15, 0, r6, c2, c0, 2
 stmia r8!, {r4-r6}
 /* Domain access control register, data fault status register,
  and instruction fault status register */
 mrc p15, 0, r4, c3, c0, 0
 mrc p15, 0, r5, c5, c0, 0
 mrc p15, 0, r6, c5, c0, 1
 stmia r8!, {r4-r6}
 /* Data aux fault status register, instruction aux fault status,
  data fault address register and instruction fault address register */
 mrc p15, 0, r4, c5, c1, 0
 mrc p15, 0, r5, c5, c1, 1
 mrc p15, 0, r6, c6, c0, 0
 mrc p15, 0, r7, c6, c0, 2
 stmia r8!, {r4-r7}
 /* user r/w thread and process ID, user r/o thread and process ID,
  priv only thread and process ID, cache size selection */
 mrc p15, 0, r4, c13, c0, 2
 mrc p15, 0, r5, c13, c0, 3
 mrc p15, 0, r6, c13, c0, 4
 mrc p15, 2, r7, c0, c0, 0
 stmia r8!, {r4-r7}
 /* Data TLB lockdown, instruction TLB lockdown registers */
 mrc p15, 0, r5, c10, c0, 0
 mrc p15, 0, r6, c10, c0, 1
 stmia r8!, {r5-r6}
 /* Secure or non secure vector base address, FCSE PID, Context PID */
 mrc p15, 0, r4, c12, c0, 0
 mrc p15, 0, r5, c13, c0, 0
 mrc p15, 0, r6, c13, c0, 1
 stmia r8!, {r4-r6}
 /* Primary remap, normal remap registers */
 mrc p15, 0, r4, c10, c2, 0
 mrc p15, 0, r5, c10, c2, 1
 stmia r8!, {r4-r5}

 /* Store current cpsr */
 mrs r2, cpsr
 stmia r8!, {r2}

 mrc p15, 0, r4, c1, c0, 0
 /* save control register */
 stmia r8!, {r4}
clean_caches:
 /* Clean Data or unified cache to PoU */
 /* How to invalidate only L1 cache???? - #FIX_ME# */
 /* mcr p15, 0, r11, c7, c11, 1 */
 cmp r9, #1 /* Check whether L2 inval is required or not */
 bne skip_l2_inval
clean_l2:
 /*
  * Jump out to the kernel flush routine
  * - reusing that code is better
  * - it executes in a cached space so is faster than refetch per-block
  * - should be faster and will change with kernel
  * - 'might' have to copy address, load and jump to it
  * - lr is used since we are running in SRAM currently.
  */
 ldr r1, kernel_flush
 mov lr, pc
 bx r1

skip_l2_inval:
 /* Data memory barrier and Data sync barrier */
 mov r1, #0
 mcr p15, 0, r1, c7, c10, 4
 mcr p15, 0, r1, c7, c10, 5

 wfi @ wait for interrupt
 nop
 nop
 nop
 nop
 nop
 nop
 nop
 nop
 nop
 nop
 bl wait_sdrc_ok
 /* restore regs and return */
 ldmfd sp!, {r0-r12, pc}

/* Make sure SDRC accesses are ok */
wait_sdrc_ok:

/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this. */
 ldr r4, cm_idlest_ckgen
wait_dpll3_lock:
 ldr r5, [r4]
 tst r5, #1
 beq wait_dpll3_lock

 ldr r4, cm_idlest1_core
wait_sdrc_ready:
 ldr r5, [r4]
 tst r5, #0x2
 bne wait_sdrc_ready
 /* allow DLL powerdown upon hw idle req */
 ldr r4, sdrc_power
 ldr r5, [r4]
 bic r5, r5, #0x40
 str r5, [r4]
is_dll_in_lock_mode:

 /* Is dll in lock mode? */
 ldr r4, sdrc_dlla_ctrl
 ldr r5, [r4]
 tst r5, #0x4
 bxne lr
 /* wait till dll locks */
wait_dll_lock_timed:
 ldr r4, wait_dll_lock_counter
 add r4, r4, #1
 str r4, wait_dll_lock_counter
 ldr r4, sdrc_dlla_status
 mov r6, #8 /* Wait 20uS for lock */
wait_dll_lock:
 subs r6, r6, #0x1
 beq kick_dll
 ldr r5, [r4]
 and r5, r5, #0x4
 cmp r5, #0x4
 bne wait_dll_lock
 bx lr

 /* disable/reenable DLL if not locked */
kick_dll:
 ldr r4, sdrc_dlla_ctrl
 ldr r5, [r4]
 mov r6, r5
 bic r6, #(1<<3) /* disable dll */
 str r6, [r4]
 dsb
 orr r6, r6, #(1<<3) /* enable dll */
 str r6, [r4]
 dsb
 ldr r4, kick_counter
 add r4, r4, #1
 str r4, kick_counter
 b wait_dll_lock_timed

cm_idlest1_core:
 .word CM_IDLEST1_CORE_V
cm_idlest_ckgen:
 .word CM_IDLEST_CKGEN_V
sdrc_dlla_status:
 .word SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
 .word SDRC_DLLA_CTRL_V
pm_prepwstst_core_p:
 .word PM_PREPWSTST_CORE_P
pm_pwstctrl_mpu:
 .word PM_PWSTCTRL_MPU_P
scratchpad_base:
 .word SCRATCHPAD_BASE_P
sram_base:
 .word SRAM_BASE_P + 0x8000
sdrc_power:
 .word SDRC_POWER_V
ttbrbit_mask:
 .word 0xFFFFC000
table_index_mask:
 .word 0xFFF00000
table_entry:
 .word 0x00000C02
cache_pred_disable_mask:
 .word 0xFFFFE7FB
control_stat:
 .word CONTROL_STAT
control_mem_rta:
 .word CONTROL_MEM_RTA_CTRL
kernel_flush:
 .word v7_flush_dcache_all
l2dis_3630:
 .word 0
 /*
  * When exporting to userspace while the counters are in SRAM,
  * these 2 words need to be at the end to facilitate retrieval!
  */
kick_counter:
 .word 0
wait_dll_lock_counter:
 .word 0
ENTRY(omap34xx_cpu_suspend_sz)
 .word . - omap34xx_cpu_suspend