/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 */
#include <linux/linkage.h>

#include <asm/assembler.h>

#include "omap34xx.h"
#include "iomap.h"
#include "cm3xxx.h"
#include "prm3xxx.h"
#include "sdrc.h"
#include "sram.h"
#include "control.h"

/*
 * Register access definitions
 */
#define SDRC_SCRATCHPAD_SEM_OFFS	0xc
#define SDRC_SCRATCHPAD_SEM_V	OMAP343X_SCRATCHPAD_REGADDR\
					(SDRC_SCRATCHPAD_SEM_OFFS)
#define PM_PREPWSTST_CORE_P	OMAP3430_PRM_BASE + CORE_MOD +\
					OMAP3430_PM_PREPWSTST
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
#define SRAM_BASE_P		OMAP3_SRAM_PA
#define CONTROL_STAT		OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
#define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE +\
					OMAP36XX_CONTROL_MEM_RTA_CTRL)

/* Move this once a correct place is available for it */
#define SCRATCHPAD_MEM_OFFS	0x310
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE +\
				OMAP343X_CONTROL_MEM_WKUP +\
				SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)

/*
 * This file needs to be built unconditionally as ARM to interoperate
 * correctly with non-Thumb-2-capable firmware.
 */
	.arm

/*
 * API functions
 */

	.text
/*
 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 * This function sets up a flag that will allow for this toggling to take
 * place on 3630. Hopefully some version in the future may not need this.
 */
ENTRY(enable_omap3630_toggle_l2_on_restore)
	stmfd	sp!, {lr}		@ save registers on stack
	/* Setup so that we will disable and enable l2 */
	mov	r1, #0x1
	adrl	r3, l2dis_3630_offset	@ may be too distant for plain adr
	ldr	r2, [r3]		@ value for offset
	str	r1, [r2, r3]		@ write to l2dis_3630
	ldmfd	sp!, {pc}		@ restore regs and return
ENDPROC(enable_omap3630_toggle_l2_on_restore)
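
/*
 * Roughly speaking, the function above is the assembly equivalent of
 * "l2dis_3630 = 1".  The store goes through the PC-relative
 * l2dis_3630_offset value rather than an absolute address, presumably so
 * that the restore code further down can read the same flag even while it
 * runs from physical addresses with the MMU still off.
 */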

/*
 * Function to call ROM code to save the secure RAM context.
 *
 * r0 = physical address of the parameters
 */
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
	mov	r3, r0			@ physical address of parameters
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	dsb				@ data write barrier
	dmb				@ data memory barrier
	smc	#1			@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r4 - r11, pc}
ENDPROC(save_secure_ram_context)
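
/*
 * Illustrative sketch only (names assumed, not taken from this file): the
 * SMC sequence above follows the calling convention used throughout this
 * file -- r12 holds the secure service ID (also copied from r0), r1 the ROM
 * task ID, r2/r6 flag values and r3 the physical address of the parameter
 * list.  A hypothetical C-level caller might look like:
 *
 *	extern u32 save_secure_ram_context(u32 args_pa);
 *	...
 *	ret = save_secure_ram_context(__pa(secure_ram_params));
 */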

/*
 * ======================
 * == Idle entry point ==
 * ======================
 */

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
 * and executes the WFI instruction. Calling WFI effectively changes the
 * power domain states to the desired target power states.
 *
 * Notes:
 * - only the minimum set of functions gets copied to internal SRAM at boot
 *   and after wake-up from OFF mode, cf. omap_push_sram_idle. The function
 *   pointers in SDRAM or SRAM are called depending on the desired low power
 *   target state.
 * - when the OMAP wakes up it continues at different execution points
 *   depending on the low power mode (non-OFF vs OFF modes),
 *   cf. 'Resume path for xxx mode' comments.
 */
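
/*
 * For context (a sketch assuming the usual OMAP3 PM init flow, not anything
 * defined in this file): the SRAM copy mentioned above is set up at init
 * time along the lines of
 *
 *	omap3_do_wfi_sram = omap_sram_push(omap3_do_wfi, omap3_do_wfi_sz);
 *
 * and omap3_do_wfi_sram_addr below is how this code finds that SRAM copy at
 * run time.
 */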
	.align	3
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * r0 contains information about saving context:
	 *   0 - No context lost
	 *   1 - Only L1 and logic lost
	 *   2 - Only L2 lost (even if L1 is retained, we clean it along with L2)
	 *   3 - Both L1 and L2 lost and logic lost
	 */

	/*
	 * For OFF mode: save context and jump to WFI in SDRAM (omap3_do_wfi)
	 * For non-OFF modes: jump to the WFI code in SRAM (omap3_do_wfi_sram)
	 */
	ldr	r4, omap3_do_wfi_sram_addr
	ldr	r5, [r4]
	cmp	r0, #0x0		@ If no context save required,
	bxeq	r5			@  jump to the WFI code in SRAM

	/* Otherwise fall through to the save context code */
save_context_wfi:
	/*
	 * Jump out to the kernel flush routine:
	 *  - reusing that code is better than duplicating it here,
	 *  - it executes in a cached space so it is faster than refetching
	 *    per block,
	 *  - it will track changes in the kernel,
	 *  - we might have to copy the address, load it and jump to it.
	 * Flush all data from the L1 data cache before disabling the
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	mov	lr, pc
	bx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C makes all data accesses
	 * strongly ordered; they no longer hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * Invalidate the L1 data cache. Even though only an invalidate is
	 * strictly necessary, the exported flush API is used here; doing a
	 * clean on an already clean cache is almost a NOP.
	 */
	ldr	r1, kernel_flush
	blx	r1
	b	omap3_do_wfi
ENDPROC(omap34xx_cpu_suspend)
omap3_do_wfi_sram_addr:
	.word	omap3_do_wfi_sram
kernel_flush:
	.word	v7_flush_dcache_all

/* ===================================
 * == WFI instruction => Enter idle ==
 * ===================================
 */

/*
 * Do WFI instruction
 * Includes the resume path for non-OFF modes
 *
 * This code gets copied to internal SRAM and is accessible
 * from both SDRAM and SRAM:
 * - executed from SRAM for non-OFF modes (omap3_do_wfi_sram),
 * - executed from SDRAM for OFF mode (omap3_do_wfi).
 */
	.align	3
ENTRY(omap3_do_wfi)
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	/* Data sync barrier and data memory barrier */
	dsb
	dmb

	/*
	 * ===================================
	 * == WFI instruction => Enter idle ==
	 * ===================================
	 */
	wfi				@ wait for interrupt

	/*
	 * ===================================
	 * == Resume path for non-OFF modes ==
	 * ===================================
	 */
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/*
	 * This implements the workaround for erratum i581:
	 * restore the SDRC state before accessing the SDRAM.
	 *
	 * Only used when returning from a non-OFF mode. For OFF
	 * mode the ROM code configures the SDRC and the DPLL
	 * before calling the restore code directly from DDR.
	 */

	/* Make sure SDRC accesses are ok */
wait_sdrc_ok:

	/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
	ldr	r4, cm_idlest_ckgen
wait_dpll3_lock:
	ldr	r5, [r4]
	tst	r5, #1
	beq	wait_dpll3_lock

	ldr	r4, cm_idlest1_core
wait_sdrc_ready:
	ldr	r5, [r4]
	tst	r5, #0x2
	bne	wait_sdrc_ready
	/* allow DLL powerdown upon hw idle req */
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]

is_dll_in_lock_mode:
	/* Is the DLL in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bne	exit_nonoff_modes	@ Return if locked
	/* wait until the DLL locks */
wait_dll_lock_timed:
	ldr	r4, sdrc_dlla_status
	/* Wait 20 us for the lock */
	mov	r6, #8
wait_dll_lock:
	subs	r6, r6, #0x1
	beq	kick_dll
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	b	exit_nonoff_modes	@ Return when locked

	/* disable/reenable DLL if not locked */
kick_dll:
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	mov	r6, r5
	bic	r6, #(1 << 3)		@ disable dll
	str	r6, [r4]
	dsb
	orr	r6, r6, #(1 << 3)	@ enable dll
	str	r6, [r4]
	dsb
	b	wait_dll_lock_timed

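/*
 * Rough C-level equivalent of the DLL re-lock logic above (a sketch for
 * readability only; the register names follow the literals below):
 *
 *	if (!(SDRC_DLLA_CTRL & (1 << 2))) {	// not in lock mode: exit now
 *		for (;;) {
 *			for (i = 8; i; i--)	// poll ~20 us for lock
 *				if (SDRC_DLLA_STATUS & (1 << 2))
 *					goto exit_nonoff_modes;	// locked
 *			SDRC_DLLA_CTRL &= ~(1 << 3);	// kick: disable DLL
 *			SDRC_DLLA_CTRL |=  (1 << 3);	// ...and re-enable it
 *		}
 *	}
 */
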
exit_nonoff_modes:
	/* Re-enable C-bit if needed */
	mrc	p15, 0, r0, c1, c0, 0
	tst	r0, #(1 << 2)		@ Check C bit enabled?
	orreq	r0, r0, #(1 << 2)	@ Enable the C bit if cleared
	mcreq	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * ===================================
	 * == Exit point from non-OFF modes ==
	 * ===================================
	 */
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(omap3_do_wfi)
sdrc_power:
	.word	SDRC_POWER_V
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
cm_idlest_ckgen:
	.word	CM_IDLEST_CKGEN_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
ENTRY(omap3_do_wfi_sz)
	.word	. - omap3_do_wfi
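
/*
 * omap3_do_wfi_sz holds the size in bytes of the omap3_do_wfi block above,
 * so that the PM init code knows how much to copy into internal SRAM (see
 * the omap_push_sram_idle reference in the omap34xx_cpu_suspend comment).
 */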

/*
 * ==============================
 * == Resume path for OFF mode ==
 * ==============================
 */

/*
 * The restore_* functions are called by the ROM code
 * when back from WFI in OFF mode.
 * Cf. the get_*restore_pointer functions.
 *
 * restore_es3: applies to 34xx >= ES3.0
 * restore_3630: applies to 36xx
 * restore: common code for 3xxx
 *
 * Note: when back from CORE and MPU OFF mode we are running
 * from SDRAM, without the MMU, without the caches and without
 * branch prediction. Also the SRAM content has been cleared.
 */
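
/*
 * Note (based on the usual OMAP3 PM flow, not on anything in this file):
 * the address of the appropriate omap3_restore* entry point is written to
 * the scratchpad area during PM init, and the ROM code jumps to it when the
 * system wakes from OFF mode.
 */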
ENTRY(omap3_restore_es3)
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	omap3_restore	@ Fall through to OMAP3 common code
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
	b	omap3_restore	@ Fall through to OMAP3 common code
ENDPROC(omap3_restore_es3)

ENTRY(omap3_restore_3630)
	ldr	r1, pm_prepwstst_core_p
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
	bne	omap3_restore	@ Fall through to OMAP3 common code
	/* Disable RTA before giving control */
	ldr	r1, control_mem_rta
	mov	r2, #OMAP36XX_RTA_DISABLE
	str	r2, [r1]
ENDPROC(omap3_restore_3630)

	/* Fall through to common code for the remaining logic */

ENTRY(omap3_restore)
	/*
	 * Read the pwstctrl register to check the reason for mpu reset.
	 * This tells us what was lost.
	 */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	bne	logic_l1_restore

	adr	r1, l2dis_3630_offset	@ address for offset
	ldr	r0, [r1]		@ value for offset
	ldr	r0, [r1, r0]		@ value at l2dis_3630
	cmp	r0, #0x1	@ should we disable L2 on 3630?
	bne	skipl2dis
	mrc	p15, 0, r0, c1, c0, 1
	bic	r0, r0, #2	@ disable L2 cache
	mcr	p15, 0, r0, c1, c0, 1
skipl2dis:
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
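	/*
	 * Background on the check above (not stated elsewhere in this file):
	 * bits 10:8 of CONTROL_STATUS encode the device type, and 0x300 means
	 * a general-purpose (GP) device, which has no PPA services, so its L2
	 * invalidation uses the generic SMI at l2_inv_gp.  Other (high-security)
	 * devices take the PPA service calls below.
	 */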
	adr	r0, l2_inv_api_params_offset
	ldr	r3, [r0]
	add	r3, r3, r0	@ r3 points to dummy parameters
	mov	r0, #40		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]	@ r3 points to parameters
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
	/* Restore L2 aux control register */
	@ set service ID for PPA
	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
	mov	r12, r0		@ copy service ID in r12
	mov	r1, #0		@ set task ID for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #8	@ r3 points to parameters
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
#endif
	b	logic_l1_restore

	.align
l2_inv_api_params_offset:
	.long	l2_inv_api_params - .
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1	@ set up to invalidate L2
	smc	#0		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	ldr	r0, [r3, #4]
	mov	r12, #0x3
	smc	#0		@ Call SMI monitor (smieq)
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	ldr	r0, [r3, #12]
	mov	r12, #0x2
	smc	#0		@ Call SMI monitor (smieq)
logic_l1_restore:
	adr	r0, l2dis_3630_offset	@ address for offset
	ldr	r1, [r0]		@ value for offset
	ldr	r1, [r0, r1]		@ value at l2dis_3630
	cmp	r1, #0x1	@ Test if L2 re-enable needed on 3630
	bne	skipl2reen
	mrc	p15, 0, r1, c1, c0, 1
	orr	r1, r1, #2	@ re-enable L2 cache
	mcr	p15, 0, r1, c1, c0, 1
skipl2reen:

	/* Now branch to the common CPU resume function */
	b	cpu_resume
ENDPROC(omap3_restore)

	.ltorg

/*
 * Local variables
 */
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
control_stat:
	.word	CONTROL_STAT
control_mem_rta:
	.word	CONTROL_MEM_RTA_CTRL
l2dis_3630_offset:
	.long	l2dis_3630 - .

	.data
	.align	2
l2dis_3630:
	.word	0

	.data
	.align	2
l2_inv_api_params:
	.word	0x1, 0x00

/*
 * Internal functions
 */

/*
 * This function implements the workaround for erratum i443; it applies to
 * 34xx >= ES3.0. It is copied to and run from SRAM in order to reconfigure
 * the SDRC parameters.
 */
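/*
 * Why SRAM: the sequence below touches the SDRC configuration itself, so it
 * presumably cannot be executed reliably out of the SDRAM it is
 * reconfiguring; es3_sdrc_fix_sz below gives the copy size, and
 * omap3_restore_es3 above performs the copy before branching to the SRAM copy.
 */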
	.text
	.align	3
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr

/*
 * Local variables
 */
	.align
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENDPROC(es3_sdrc_fix)
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix