/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License, version 2, as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * Derived from book3s_hv_rmhandlers.S, which is:
 *
 * Copyright 2011 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 *
 */

#include <asm/reg.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/tm.h>
#include <asm/cputable.h>

#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
/* Offset of checkpointed GPR 'reg' within the vcpu struct. */
#define VCPU_GPRS_TM(reg) (((reg) * ULONG_SIZE) + VCPU_GPR_TM)
27 | /* | |
28 | * Save transactional state and TM-related registers. | |
351ac5bb SG |
29 | * Called with: |
30 | * - r3 pointing to the vcpu struct | |
31 | * - r4 points to the MSR with current TS bits: | |
32 | * (For HV KVM, it is VCPU_MSR ; For PR KVM, it is host MSR). | |
46bef646 | 33 | * This can modify all checkpointed registers, but |
351ac5bb | 34 | * restores r1, r2 before exit. |
46bef646 SG |
35 | */ |
36 | _GLOBAL(kvmppc_save_tm) | |
37 | mflr r0 | |
38 | std r0, PPC_LR_STKOFF(r1) | |
39 | ||
40 | /* Turn on TM. */ | |
41 | mfmsr r8 | |
42 | li r0, 1 | |
43 | rldimi r8, r0, MSR_TM_LG, 63-MSR_TM_LG | |
44 | mtmsrd r8 | |
45 | ||
351ac5bb | 46 | rldicl. r4, r4, 64 - MSR_TS_S_LG, 62 |
46bef646 | 47 | beq 1f /* TM not active in guest. */ |
46bef646 | 48 | |
351ac5bb SG |
49 | std r1, HSTATE_SCRATCH2(r13) |
50 | std r3, HSTATE_SCRATCH1(r13) | |
46bef646 SG |
51 | |
52 | #ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE | |
53 | BEGIN_FTR_SECTION | |
54 | /* Emulation of the treclaim instruction needs TEXASR before treclaim */ | |
55 | mfspr r6, SPRN_TEXASR | |
351ac5bb | 56 | std r6, VCPU_ORIG_TEXASR(r3) |
46bef646 SG |
57 | END_FTR_SECTION_IFSET(CPU_FTR_P9_TM_HV_ASSIST) |
58 | #endif | |
59 | ||
60 | /* Clear the MSR RI since r1, r13 are all going to be foobar. */ | |
61 | li r5, 0 | |
62 | mtmsrd r5, 1 | |
63 | ||
351ac5bb SG |
64 | li r3, TM_CAUSE_KVM_RESCHED |
65 | ||
46bef646 SG |
66 | /* All GPRs are volatile at this point. */ |
67 | TRECLAIM(R3) | |
68 | ||
69 | /* Temporarily store r13 and r9 so we have some regs to play with */ | |
70 | SET_SCRATCH0(r13) | |
71 | GET_PACA(r13) | |
72 | std r9, PACATMSCRATCH(r13) | |
351ac5bb | 73 | ld r9, HSTATE_SCRATCH1(r13) |
46bef646 SG |
74 | |
75 | /* Get a few more GPRs free. */ | |
76 | std r29, VCPU_GPRS_TM(29)(r9) | |
77 | std r30, VCPU_GPRS_TM(30)(r9) | |
78 | std r31, VCPU_GPRS_TM(31)(r9) | |
79 | ||
80 | /* Save away PPR and DSCR soon so don't run with user values. */ | |
81 | mfspr r31, SPRN_PPR | |
82 | HMT_MEDIUM | |
83 | mfspr r30, SPRN_DSCR | |
84 | #ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE | |
85 | ld r29, HSTATE_DSCR(r13) | |
86 | mtspr SPRN_DSCR, r29 | |
87 | #endif | |
88 | ||
89 | /* Save all but r9, r13 & r29-r31 */ | |
90 | reg = 0 | |
91 | .rept 29 | |
92 | .if (reg != 9) && (reg != 13) | |
93 | std reg, VCPU_GPRS_TM(reg)(r9) | |
94 | .endif | |
95 | reg = reg + 1 | |
96 | .endr | |
97 | /* ... now save r13 */ | |
98 | GET_SCRATCH0(r4) | |
99 | std r4, VCPU_GPRS_TM(13)(r9) | |
100 | /* ... and save r9 */ | |
101 | ld r4, PACATMSCRATCH(r13) | |
102 | std r4, VCPU_GPRS_TM(9)(r9) | |
103 | ||
104 | /* Reload stack pointer and TOC. */ | |
351ac5bb | 105 | ld r1, HSTATE_SCRATCH2(r13) |
46bef646 SG |
106 | ld r2, PACATOC(r13) |
107 | ||
108 | /* Set MSR RI now we have r1 and r13 back. */ | |
109 | li r5, MSR_RI | |
110 | mtmsrd r5, 1 | |
111 | ||
112 | /* Save away checkpinted SPRs. */ | |
113 | std r31, VCPU_PPR_TM(r9) | |
114 | std r30, VCPU_DSCR_TM(r9) | |
115 | mflr r5 | |
116 | mfcr r6 | |
117 | mfctr r7 | |
118 | mfspr r8, SPRN_AMR | |
119 | mfspr r10, SPRN_TAR | |
120 | mfxer r11 | |
121 | std r5, VCPU_LR_TM(r9) | |
122 | stw r6, VCPU_CR_TM(r9) | |
123 | std r7, VCPU_CTR_TM(r9) | |
124 | std r8, VCPU_AMR_TM(r9) | |
125 | std r10, VCPU_TAR_TM(r9) | |
126 | std r11, VCPU_XER_TM(r9) | |
127 | ||
128 | /* Restore r12 as trap number. */ | |
129 | lwz r12, VCPU_TRAP(r9) | |
130 | ||
131 | /* Save FP/VSX. */ | |
132 | addi r3, r9, VCPU_FPRS_TM | |
133 | bl store_fp_state | |
134 | addi r3, r9, VCPU_VRS_TM | |
135 | bl store_vr_state | |
136 | mfspr r6, SPRN_VRSAVE | |
137 | stw r6, VCPU_VRSAVE_TM(r9) | |
138 | 1: | |
139 | /* | |
140 | * We need to save these SPRs after the treclaim so that the software | |
141 | * error code is recorded correctly in the TEXASR. Also the user may | |
142 | * change these outside of a transaction, so they must always be | |
143 | * context switched. | |
144 | */ | |
145 | mfspr r7, SPRN_TEXASR | |
146 | std r7, VCPU_TEXASR(r9) | |
147 | 11: | |
148 | mfspr r5, SPRN_TFHAR | |
149 | mfspr r6, SPRN_TFIAR | |
150 | std r5, VCPU_TFHAR(r9) | |
151 | std r6, VCPU_TFIAR(r9) | |
152 | ||
153 | ld r0, PPC_LR_STKOFF(r1) | |
154 | mtlr r0 | |
155 | blr | |
156 | ||
157 | /* | |
158 | * Restore transactional state and TM-related registers. | |
351ac5bb SG |
159 | * Called with: |
160 | * - r3 pointing to the vcpu struct. | |
161 | * - r4 is the guest MSR with desired TS bits: | |
162 | * For HV KVM, it is VCPU_MSR | |
163 | * For PR KVM, it is provided by caller | |
46bef646 | 164 | * This potentially modifies all checkpointed registers. |
351ac5bb | 165 | * It restores r1, r2 from the PACA. |
46bef646 SG |
166 | */ |
167 | _GLOBAL(kvmppc_restore_tm) | |
168 | mflr r0 | |
169 | std r0, PPC_LR_STKOFF(r1) | |
170 | ||
171 | /* Turn on TM/FP/VSX/VMX so we can restore them. */ | |
172 | mfmsr r5 | |
173 | li r6, MSR_TM >> 32 | |
174 | sldi r6, r6, 32 | |
175 | or r5, r5, r6 | |
176 | ori r5, r5, MSR_FP | |
177 | oris r5, r5, (MSR_VEC | MSR_VSX)@h | |
178 | mtmsrd r5 | |
179 | ||
180 | /* | |
181 | * The user may change these outside of a transaction, so they must | |
182 | * always be context switched. | |
183 | */ | |
351ac5bb SG |
184 | ld r5, VCPU_TFHAR(r3) |
185 | ld r6, VCPU_TFIAR(r3) | |
186 | ld r7, VCPU_TEXASR(r3) | |
46bef646 SG |
187 | mtspr SPRN_TFHAR, r5 |
188 | mtspr SPRN_TFIAR, r6 | |
189 | mtspr SPRN_TEXASR, r7 | |
190 | ||
351ac5bb | 191 | mr r5, r4 |
46bef646 SG |
192 | rldicl. r5, r5, 64 - MSR_TS_S_LG, 62 |
193 | beqlr /* TM not active in guest */ | |
351ac5bb | 194 | std r1, HSTATE_SCRATCH2(r13) |
46bef646 SG |
195 | |
196 | /* Make sure the failure summary is set, otherwise we'll program check | |
197 | * when we trechkpt. It's possible that this might have been not set | |
198 | * on a kvmppc_set_one_reg() call but we shouldn't let this crash the | |
199 | * host. | |
200 | */ | |
201 | oris r7, r7, (TEXASR_FS)@h | |
202 | mtspr SPRN_TEXASR, r7 | |
203 | ||
204 | /* | |
205 | * We need to load up the checkpointed state for the guest. | |
206 | * We need to do this early as it will blow away any GPRs, VSRs and | |
207 | * some SPRs. | |
208 | */ | |
209 | ||
351ac5bb | 210 | mr r31, r3 |
46bef646 SG |
211 | addi r3, r31, VCPU_FPRS_TM |
212 | bl load_fp_state | |
213 | addi r3, r31, VCPU_VRS_TM | |
214 | bl load_vr_state | |
351ac5bb SG |
215 | mr r3, r31 |
216 | lwz r7, VCPU_VRSAVE_TM(r3) | |
46bef646 SG |
217 | mtspr SPRN_VRSAVE, r7 |
218 | ||
351ac5bb SG |
219 | ld r5, VCPU_LR_TM(r3) |
220 | lwz r6, VCPU_CR_TM(r3) | |
221 | ld r7, VCPU_CTR_TM(r3) | |
222 | ld r8, VCPU_AMR_TM(r3) | |
223 | ld r9, VCPU_TAR_TM(r3) | |
224 | ld r10, VCPU_XER_TM(r3) | |
46bef646 SG |
225 | mtlr r5 |
226 | mtcr r6 | |
227 | mtctr r7 | |
228 | mtspr SPRN_AMR, r8 | |
229 | mtspr SPRN_TAR, r9 | |
230 | mtxer r10 | |
231 | ||
232 | /* | |
233 | * Load up PPR and DSCR values but don't put them in the actual SPRs | |
234 | * till the last moment to avoid running with userspace PPR and DSCR for | |
235 | * too long. | |
236 | */ | |
351ac5bb SG |
237 | ld r29, VCPU_DSCR_TM(r3) |
238 | ld r30, VCPU_PPR_TM(r3) | |
46bef646 SG |
239 | |
240 | std r2, PACATMSCRATCH(r13) /* Save TOC */ | |
241 | ||
242 | /* Clear the MSR RI since r1, r13 are all going to be foobar. */ | |
243 | li r5, 0 | |
244 | mtmsrd r5, 1 | |
245 | ||
246 | /* Load GPRs r0-r28 */ | |
247 | reg = 0 | |
248 | .rept 29 | |
249 | ld reg, VCPU_GPRS_TM(reg)(r31) | |
250 | reg = reg + 1 | |
251 | .endr | |
252 | ||
253 | mtspr SPRN_DSCR, r29 | |
254 | mtspr SPRN_PPR, r30 | |
255 | ||
256 | /* Load final GPRs */ | |
257 | ld 29, VCPU_GPRS_TM(29)(r31) | |
258 | ld 30, VCPU_GPRS_TM(30)(r31) | |
259 | ld 31, VCPU_GPRS_TM(31)(r31) | |
260 | ||
261 | /* TM checkpointed state is now setup. All GPRs are now volatile. */ | |
262 | TRECHKPT | |
263 | ||
264 | /* Now let's get back the state we need. */ | |
265 | HMT_MEDIUM | |
266 | GET_PACA(r13) | |
267 | #ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE | |
268 | ld r29, HSTATE_DSCR(r13) | |
269 | mtspr SPRN_DSCR, r29 | |
46bef646 | 270 | #endif |
351ac5bb | 271 | ld r1, HSTATE_SCRATCH2(r13) |
46bef646 SG |
272 | ld r2, PACATMSCRATCH(r13) |
273 | ||
274 | /* Set the MSR RI since we have our registers back. */ | |
275 | li r5, MSR_RI | |
276 | mtmsrd r5, 1 | |
277 | ld r0, PPC_LR_STKOFF(r1) | |
278 | mtlr r0 | |
279 | blr | |
280 | #endif /* CONFIG_PPC_TRANSACTIONAL_MEM */ |