tcg/tcg-op.h
1 /*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24 #include "tcg.h"
25 #include "exec/helper-proto.h"
26 #include "exec/helper-gen.h"
27
28 int gen_new_label(void);
29
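/* Low-level emission helpers.  Each one appends an opcode to
   tcg_ctx.gen_opc_ptr and its operands to tcg_ctx.gen_opparam_ptr; the
   _i32/_i64 suffix names the operand type, and each 'i' in opNi/opNii marks
   a trailing constant (TCGArg) operand rather than a temporary. */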
30 static inline void tcg_gen_op0(TCGOpcode opc)
31 {
32 *tcg_ctx.gen_opc_ptr++ = opc;
33 }
34
35 static inline void tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 arg1)
36 {
37 *tcg_ctx.gen_opc_ptr++ = opc;
38 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
39 }
40
41 static inline void tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 arg1)
42 {
43 *tcg_ctx.gen_opc_ptr++ = opc;
44 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
45 }
46
47 static inline void tcg_gen_op1i(TCGOpcode opc, TCGArg arg1)
48 {
49 *tcg_ctx.gen_opc_ptr++ = opc;
50 *tcg_ctx.gen_opparam_ptr++ = arg1;
51 }
52
53 static inline void tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2)
54 {
55 *tcg_ctx.gen_opc_ptr++ = opc;
56 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
57 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
58 }
59
60 static inline void tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2)
61 {
62 *tcg_ctx.gen_opc_ptr++ = opc;
63 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
64 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
65 }
66
67 static inline void tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGArg arg2)
68 {
69 *tcg_ctx.gen_opc_ptr++ = opc;
70 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
71 *tcg_ctx.gen_opparam_ptr++ = arg2;
72 }
73
74 static inline void tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGArg arg2)
75 {
76 *tcg_ctx.gen_opc_ptr++ = opc;
77 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
78 *tcg_ctx.gen_opparam_ptr++ = arg2;
79 }
80
81 static inline void tcg_gen_op2ii(TCGOpcode opc, TCGArg arg1, TCGArg arg2)
82 {
83 *tcg_ctx.gen_opc_ptr++ = opc;
84 *tcg_ctx.gen_opparam_ptr++ = arg1;
85 *tcg_ctx.gen_opparam_ptr++ = arg2;
86 }
87
88 static inline void tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
89 TCGv_i32 arg3)
90 {
91 *tcg_ctx.gen_opc_ptr++ = opc;
92 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
93 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
94 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
95 }
96
97 static inline void tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
98 TCGv_i64 arg3)
99 {
100 *tcg_ctx.gen_opc_ptr++ = opc;
101 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
102 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
103 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
104 }
105
106 static inline void tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 arg1,
107 TCGv_i32 arg2, TCGArg arg3)
108 {
109 *tcg_ctx.gen_opc_ptr++ = opc;
110 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
111 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
112 *tcg_ctx.gen_opparam_ptr++ = arg3;
113 }
114
115 static inline void tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 arg1,
116 TCGv_i64 arg2, TCGArg arg3)
117 {
118 *tcg_ctx.gen_opc_ptr++ = opc;
119 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
120 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
121 *tcg_ctx.gen_opparam_ptr++ = arg3;
122 }
123
124 static inline void tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
125 TCGv_ptr base, TCGArg offset)
126 {
127 *tcg_ctx.gen_opc_ptr++ = opc;
128 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
129 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
130 *tcg_ctx.gen_opparam_ptr++ = offset;
131 }
132
133 static inline void tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
134 TCGv_ptr base, TCGArg offset)
135 {
136 *tcg_ctx.gen_opc_ptr++ = opc;
137 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
138 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
139 *tcg_ctx.gen_opparam_ptr++ = offset;
140 }
141
142 static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
143 TCGv_i32 arg3, TCGv_i32 arg4)
144 {
145 *tcg_ctx.gen_opc_ptr++ = opc;
146 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
147 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
148 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
149 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
150 }
151
152 static inline void tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
153 TCGv_i64 arg3, TCGv_i64 arg4)
154 {
155 *tcg_ctx.gen_opc_ptr++ = opc;
156 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
157 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
158 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
159 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
160 }
161
162 static inline void tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
163 TCGv_i32 arg3, TCGArg arg4)
164 {
165 *tcg_ctx.gen_opc_ptr++ = opc;
166 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
167 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
168 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
169 *tcg_ctx.gen_opparam_ptr++ = arg4;
170 }
171
172 static inline void tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
173 TCGv_i64 arg3, TCGArg arg4)
174 {
175 *tcg_ctx.gen_opc_ptr++ = opc;
176 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
177 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
178 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
179 *tcg_ctx.gen_opparam_ptr++ = arg4;
180 }
181
182 static inline void tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
183 TCGArg arg3, TCGArg arg4)
184 {
185 *tcg_ctx.gen_opc_ptr++ = opc;
186 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
187 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
188 *tcg_ctx.gen_opparam_ptr++ = arg3;
189 *tcg_ctx.gen_opparam_ptr++ = arg4;
190 }
191
192 static inline void tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
193 TCGArg arg3, TCGArg arg4)
194 {
195 *tcg_ctx.gen_opc_ptr++ = opc;
196 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
197 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
198 *tcg_ctx.gen_opparam_ptr++ = arg3;
199 *tcg_ctx.gen_opparam_ptr++ = arg4;
200 }
201
202 static inline void tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
203 TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5)
204 {
205 *tcg_ctx.gen_opc_ptr++ = opc;
206 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
207 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
208 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
209 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
210 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
211 }
212
213 static inline void tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
214 TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5)
215 {
216 *tcg_ctx.gen_opc_ptr++ = opc;
217 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
218 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
219 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
220 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
221 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
222 }
223
224 static inline void tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
225 TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5)
226 {
227 *tcg_ctx.gen_opc_ptr++ = opc;
228 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
229 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
230 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
231 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
232 *tcg_ctx.gen_opparam_ptr++ = arg5;
233 }
234
235 static inline void tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
236 TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5)
237 {
238 *tcg_ctx.gen_opc_ptr++ = opc;
239 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
240 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
241 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
242 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
243 *tcg_ctx.gen_opparam_ptr++ = arg5;
244 }
245
246 static inline void tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 arg1,
247 TCGv_i32 arg2, TCGv_i32 arg3,
248 TCGArg arg4, TCGArg arg5)
249 {
250 *tcg_ctx.gen_opc_ptr++ = opc;
251 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
252 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
253 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
254 *tcg_ctx.gen_opparam_ptr++ = arg4;
255 *tcg_ctx.gen_opparam_ptr++ = arg5;
256 }
257
258 static inline void tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 arg1,
259 TCGv_i64 arg2, TCGv_i64 arg3,
260 TCGArg arg4, TCGArg arg5)
261 {
262 *tcg_ctx.gen_opc_ptr++ = opc;
263 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
264 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
265 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
266 *tcg_ctx.gen_opparam_ptr++ = arg4;
267 *tcg_ctx.gen_opparam_ptr++ = arg5;
268 }
269
270 static inline void tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
271 TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5,
272 TCGv_i32 arg6)
273 {
274 *tcg_ctx.gen_opc_ptr++ = opc;
275 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
276 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
277 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
278 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
279 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
280 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg6);
281 }
282
283 static inline void tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
284 TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5,
285 TCGv_i64 arg6)
286 {
287 *tcg_ctx.gen_opc_ptr++ = opc;
288 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
289 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
290 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
291 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
292 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
293 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg6);
294 }
295
296 static inline void tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
297 TCGv_i32 arg3, TCGv_i32 arg4,
298 TCGv_i32 arg5, TCGArg arg6)
299 {
300 *tcg_ctx.gen_opc_ptr++ = opc;
301 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
302 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
303 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
304 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
305 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
306 *tcg_ctx.gen_opparam_ptr++ = arg6;
307 }
308
309 static inline void tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
310 TCGv_i64 arg3, TCGv_i64 arg4,
311 TCGv_i64 arg5, TCGArg arg6)
312 {
313 *tcg_ctx.gen_opc_ptr++ = opc;
314 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
315 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
316 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
317 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
318 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
319 *tcg_ctx.gen_opparam_ptr++ = arg6;
320 }
321
322 static inline void tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 arg1,
323 TCGv_i32 arg2, TCGv_i32 arg3,
324 TCGv_i32 arg4, TCGArg arg5, TCGArg arg6)
325 {
326 *tcg_ctx.gen_opc_ptr++ = opc;
327 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
328 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
329 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
330 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
331 *tcg_ctx.gen_opparam_ptr++ = arg5;
332 *tcg_ctx.gen_opparam_ptr++ = arg6;
333 }
334
335 static inline void tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 arg1,
336 TCGv_i64 arg2, TCGv_i64 arg3,
337 TCGv_i64 arg4, TCGArg arg5, TCGArg arg6)
338 {
339 *tcg_ctx.gen_opc_ptr++ = opc;
340 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
341 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
342 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
343 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
344 *tcg_ctx.gen_opparam_ptr++ = arg5;
345 *tcg_ctx.gen_opparam_ptr++ = arg6;
346 }
347
348 static inline void tcg_add_param_i32(TCGv_i32 val)
349 {
350 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
351 }
352
353 static inline void tcg_add_param_i64(TCGv_i64 val)
354 {
355 #if TCG_TARGET_REG_BITS == 32
356 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_LOW(val));
357 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_HIGH(val));
358 #else
359 *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
360 #endif
361 }
362
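/* Labels are integer indices allocated by gen_new_label(); gen_set_label()
   binds the label at the current point in the opcode stream, and tcg_gen_br()
   and the brcond helpers below branch to it. */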
363 static inline void gen_set_label(int n)
364 {
365 tcg_gen_op1i(INDEX_op_set_label, n);
366 }
367
368 static inline void tcg_gen_br(int label)
369 {
370 tcg_gen_op1i(INDEX_op_br, label);
371 }
372
373 static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
374 {
375 if (!TCGV_EQUAL_I32(ret, arg))
376 tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
377 }
378
379 static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
380 {
381 tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg);
382 }
383
384 /* 32 bit ops */
385
386 static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
387 {
388 tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
389 }
390
391 static inline void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
392 {
393 tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
394 }
395
396 static inline void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
397 {
398 tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
399 }
400
401 static inline void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
402 {
403 tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
404 }
405
406 static inline void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
407 {
408 tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
409 }
410
411 static inline void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
412 {
413 tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
414 }
415
416 static inline void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
417 {
418 tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
419 }
420
421 static inline void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
422 {
423 tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
424 }
425
426 static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
427 {
428 tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
429 }
430
431 static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
432 {
433 /* some cases can be optimized here */
434 if (arg2 == 0) {
435 tcg_gen_mov_i32(ret, arg1);
436 } else {
437 TCGv_i32 t0 = tcg_const_i32(arg2);
438 tcg_gen_add_i32(ret, arg1, t0);
439 tcg_temp_free_i32(t0);
440 }
441 }
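/* Usage sketch (hypothetical front-end code, with cpu_regs[] standing in for
   per-guest-register TCGv_i32 globals):
       tcg_gen_addi_i32(cpu_regs[rd], cpu_regs[rn], 1);
   A zero immediate degenerates to a mov; otherwise the constant is
   materialized in a scratch temporary that is freed right away. */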
442
443 static inline void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
444 {
445 tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
446 }
447
448 static inline void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
449 {
450 TCGv_i32 t0 = tcg_const_i32(arg1);
451 tcg_gen_sub_i32(ret, t0, arg2);
452 tcg_temp_free_i32(t0);
453 }
454
455 static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
456 {
457 /* some cases can be optimized here */
458 if (arg2 == 0) {
459 tcg_gen_mov_i32(ret, arg1);
460 } else {
461 TCGv_i32 t0 = tcg_const_i32(arg2);
462 tcg_gen_sub_i32(ret, arg1, t0);
463 tcg_temp_free_i32(t0);
464 }
465 }
466
467 static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
468 {
469 if (TCGV_EQUAL_I32(arg1, arg2)) {
470 tcg_gen_mov_i32(ret, arg1);
471 } else {
472 tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
473 }
474 }
475
476 static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
477 {
478 TCGv_i32 t0;
479 /* Some cases can be optimized here. */
480 switch (arg2) {
481 case 0:
482 tcg_gen_movi_i32(ret, 0);
483 return;
484 case 0xffffffffu:
485 tcg_gen_mov_i32(ret, arg1);
486 return;
487 case 0xffu:
488 /* Don't recurse with tcg_gen_ext8u_i32. */
489 if (TCG_TARGET_HAS_ext8u_i32) {
490 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
491 return;
492 }
493 break;
494 case 0xffffu:
495 if (TCG_TARGET_HAS_ext16u_i32) {
496 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
497 return;
498 }
499 break;
500 }
501 t0 = tcg_const_i32(arg2);
502 tcg_gen_and_i32(ret, arg1, t0);
503 tcg_temp_free_i32(t0);
504 }
505
506 static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
507 {
508 if (TCGV_EQUAL_I32(arg1, arg2)) {
509 tcg_gen_mov_i32(ret, arg1);
510 } else {
511 tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
512 }
513 }
514
515 static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
516 {
517 /* Some cases can be optimized here. */
518 if (arg2 == -1) {
519 tcg_gen_movi_i32(ret, -1);
520 } else if (arg2 == 0) {
521 tcg_gen_mov_i32(ret, arg1);
522 } else {
523 TCGv_i32 t0 = tcg_const_i32(arg2);
524 tcg_gen_or_i32(ret, arg1, t0);
525 tcg_temp_free_i32(t0);
526 }
527 }
528
529 static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
530 {
531 if (TCGV_EQUAL_I32(arg1, arg2)) {
532 tcg_gen_movi_i32(ret, 0);
533 } else {
534 tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
535 }
536 }
537
538 static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
539 {
540 /* Some cases can be optimized here. */
541 if (arg2 == 0) {
542 tcg_gen_mov_i32(ret, arg1);
543 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
544 /* Don't recurse with tcg_gen_not_i32. */
545 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
546 } else {
547 TCGv_i32 t0 = tcg_const_i32(arg2);
548 tcg_gen_xor_i32(ret, arg1, t0);
549 tcg_temp_free_i32(t0);
550 }
551 }
552
553 static inline void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
554 {
555 tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
556 }
557
558 static inline void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
559 {
560 if (arg2 == 0) {
561 tcg_gen_mov_i32(ret, arg1);
562 } else {
563 TCGv_i32 t0 = tcg_const_i32(arg2);
564 tcg_gen_shl_i32(ret, arg1, t0);
565 tcg_temp_free_i32(t0);
566 }
567 }
568
569 static inline void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
570 {
571 tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
572 }
573
574 static inline void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
575 {
576 if (arg2 == 0) {
577 tcg_gen_mov_i32(ret, arg1);
578 } else {
579 TCGv_i32 t0 = tcg_const_i32(arg2);
580 tcg_gen_shr_i32(ret, arg1, t0);
581 tcg_temp_free_i32(t0);
582 }
583 }
584
585 static inline void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
586 {
587 tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
588 }
589
590 static inline void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
591 {
592 if (arg2 == 0) {
593 tcg_gen_mov_i32(ret, arg1);
594 } else {
595 TCGv_i32 t0 = tcg_const_i32(arg2);
596 tcg_gen_sar_i32(ret, arg1, t0);
597 tcg_temp_free_i32(t0);
598 }
599 }
600
601 static inline void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1,
602 TCGv_i32 arg2, int label_index)
603 {
604 if (cond == TCG_COND_ALWAYS) {
605 tcg_gen_br(label_index);
606 } else if (cond != TCG_COND_NEVER) {
607 tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
608 }
609 }
610
611 static inline void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1,
612 int32_t arg2, int label_index)
613 {
614 if (cond == TCG_COND_ALWAYS) {
615 tcg_gen_br(label_index);
616 } else if (cond != TCG_COND_NEVER) {
617 TCGv_i32 t0 = tcg_const_i32(arg2);
618 tcg_gen_brcond_i32(cond, arg1, t0, label_index);
619 tcg_temp_free_i32(t0);
620 }
621 }
622
623 static inline void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
624 TCGv_i32 arg1, TCGv_i32 arg2)
625 {
626 if (cond == TCG_COND_ALWAYS) {
627 tcg_gen_movi_i32(ret, 1);
628 } else if (cond == TCG_COND_NEVER) {
629 tcg_gen_movi_i32(ret, 0);
630 } else {
631 tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
632 }
633 }
634
635 static inline void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
636 TCGv_i32 arg1, int32_t arg2)
637 {
638 if (cond == TCG_COND_ALWAYS) {
639 tcg_gen_movi_i32(ret, 1);
640 } else if (cond == TCG_COND_NEVER) {
641 tcg_gen_movi_i32(ret, 0);
642 } else {
643 TCGv_i32 t0 = tcg_const_i32(arg2);
644 tcg_gen_setcond_i32(cond, ret, arg1, t0);
645 tcg_temp_free_i32(t0);
646 }
647 }
648
649 static inline void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
650 {
651 tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
652 }
653
654 static inline void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
655 {
656 TCGv_i32 t0 = tcg_const_i32(arg2);
657 tcg_gen_mul_i32(ret, arg1, t0);
658 tcg_temp_free_i32(t0);
659 }
660
661 static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
662 {
663 if (TCG_TARGET_HAS_div_i32) {
664 tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
665 } else if (TCG_TARGET_HAS_div2_i32) {
666 TCGv_i32 t0 = tcg_temp_new_i32();
667 tcg_gen_sari_i32(t0, arg1, 31);
668 tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
669 tcg_temp_free_i32(t0);
670 } else {
671 gen_helper_div_i32(ret, arg1, arg2);
672 }
673 }
674
675 static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
676 {
677 if (TCG_TARGET_HAS_rem_i32) {
678 tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
679 } else if (TCG_TARGET_HAS_div_i32) {
680 TCGv_i32 t0 = tcg_temp_new_i32();
681 tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
682 tcg_gen_mul_i32(t0, t0, arg2);
683 tcg_gen_sub_i32(ret, arg1, t0);
684 tcg_temp_free_i32(t0);
685 } else if (TCG_TARGET_HAS_div2_i32) {
686 TCGv_i32 t0 = tcg_temp_new_i32();
687 tcg_gen_sari_i32(t0, arg1, 31);
688 tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
689 tcg_temp_free_i32(t0);
690 } else {
691 gen_helper_rem_i32(ret, arg1, arg2);
692 }
693 }
694
695 static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
696 {
697 if (TCG_TARGET_HAS_div_i32) {
698 tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
699 } else if (TCG_TARGET_HAS_div2_i32) {
700 TCGv_i32 t0 = tcg_temp_new_i32();
701 tcg_gen_movi_i32(t0, 0);
702 tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
703 tcg_temp_free_i32(t0);
704 } else {
705 gen_helper_divu_i32(ret, arg1, arg2);
706 }
707 }
708
709 static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
710 {
711 if (TCG_TARGET_HAS_rem_i32) {
712 tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
713 } else if (TCG_TARGET_HAS_div_i32) {
714 TCGv_i32 t0 = tcg_temp_new_i32();
715 tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
716 tcg_gen_mul_i32(t0, t0, arg2);
717 tcg_gen_sub_i32(ret, arg1, t0);
718 tcg_temp_free_i32(t0);
719 } else if (TCG_TARGET_HAS_div2_i32) {
720 TCGv_i32 t0 = tcg_temp_new_i32();
721 tcg_gen_movi_i32(t0, 0);
722 tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
723 tcg_temp_free_i32(t0);
724 } else {
725 gen_helper_remu_i32(ret, arg1, arg2);
726 }
727 }
728
729 #if TCG_TARGET_REG_BITS == 32
730
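/* On 32-bit hosts a TCGv_i64 is a pair of 32-bit temporaries (TCGV_LOW and
   TCGV_HIGH), so the 64-bit operations below are decomposed into 32-bit ops,
   double-word opcodes such as add2/sub2/brcond2, or helper calls. */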
731 static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
732 {
733 if (!TCGV_EQUAL_I64(ret, arg)) {
734 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
735 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
736 }
737 }
738
739 static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
740 {
741 tcg_gen_movi_i32(TCGV_LOW(ret), arg);
742 tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
743 }
744
745 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
746 tcg_target_long offset)
747 {
748 tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
749 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
750 }
751
752 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
753 tcg_target_long offset)
754 {
755 tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
756 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
757 }
758
759 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
760 tcg_target_long offset)
761 {
762 tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
763 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
764 }
765
766 static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
767 tcg_target_long offset)
768 {
769 tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
770 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
771 }
772
773 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
774 tcg_target_long offset)
775 {
776 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
777 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
778 }
779
780 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
781 tcg_target_long offset)
782 {
783 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
784 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
785 }
786
787 static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
788 tcg_target_long offset)
789 {
790 /* since arg2 and ret have different types, they cannot be the
791 same temporary */
792 #ifdef HOST_WORDS_BIGENDIAN
793 tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
794 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
795 #else
796 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
797 tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
798 #endif
799 }
800
801 static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
802 tcg_target_long offset)
803 {
804 tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
805 }
806
807 static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
808 tcg_target_long offset)
809 {
810 tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
811 }
812
813 static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
814 tcg_target_long offset)
815 {
816 tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
817 }
818
819 static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
820 tcg_target_long offset)
821 {
822 #ifdef HOST_WORDS_BIGENDIAN
823 tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
824 tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
825 #else
826 tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
827 tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
828 #endif
829 }
830
831 static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
832 {
833 tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
834 TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
835 TCGV_HIGH(arg2));
836 /* Allow the optimizer room to replace add2 with two moves. */
837 tcg_gen_op0(INDEX_op_nop);
838 }
839
840 static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
841 {
842 tcg_gen_op6_i32(INDEX_op_sub2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
843 TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
844 TCGV_HIGH(arg2));
845 /* Allow the optimizer room to replace sub2 with two moves. */
846 tcg_gen_op0(INDEX_op_nop);
847 }
848
849 static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
850 {
851 tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
852 tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
853 }
854
855 static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
856 {
857 tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
858 tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
859 }
860
861 static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
862 {
863 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
864 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
865 }
866
867 static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
868 {
869 tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
870 tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
871 }
872
873 static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
874 {
875 tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
876 tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
877 }
878
879 static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
880 {
881 tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
882 tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
883 }
884
885 /* XXX: use generic code once basic block handling is OK, or
886    CPU-specific code (x86). */
887 static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
888 {
889 gen_helper_shl_i64(ret, arg1, arg2);
890 }
891
892 static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
893 {
894 tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
895 }
896
897 static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
898 {
899 gen_helper_shr_i64(ret, arg1, arg2);
900 }
901
902 static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
903 {
904 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
905 }
906
907 static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
908 {
909 gen_helper_sar_i64(ret, arg1, arg2);
910 }
911
912 static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
913 {
914 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
915 }
916
917 static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
918 TCGv_i64 arg2, int label_index)
919 {
920 if (cond == TCG_COND_ALWAYS) {
921 tcg_gen_br(label_index);
922 } else if (cond != TCG_COND_NEVER) {
923 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
924 TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
925 TCGV_HIGH(arg2), cond, label_index);
926 }
927 }
928
929 static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
930 TCGv_i64 arg1, TCGv_i64 arg2)
931 {
932 if (cond == TCG_COND_ALWAYS) {
933 tcg_gen_movi_i32(TCGV_LOW(ret), 1);
934 } else if (cond == TCG_COND_NEVER) {
935 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
936 } else {
937 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
938 TCGV_LOW(arg1), TCGV_HIGH(arg1),
939 TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
940 }
941 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
942 }
943
944 static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
945 {
946 TCGv_i64 t0;
947 TCGv_i32 t1;
948
949 t0 = tcg_temp_new_i64();
950 t1 = tcg_temp_new_i32();
951
952 if (TCG_TARGET_HAS_mulu2_i32) {
953 tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
954 TCGV_LOW(arg1), TCGV_LOW(arg2));
955 /* Allow the optimizer room to replace mulu2 with two moves. */
956 tcg_gen_op0(INDEX_op_nop);
957 } else {
958 tcg_debug_assert(TCG_TARGET_HAS_muluh_i32);
959 tcg_gen_op3_i32(INDEX_op_mul_i32, TCGV_LOW(t0),
960 TCGV_LOW(arg1), TCGV_LOW(arg2));
961 tcg_gen_op3_i32(INDEX_op_muluh_i32, TCGV_HIGH(t0),
962 TCGV_LOW(arg1), TCGV_LOW(arg2));
963 }
964
965 tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
966 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
967 tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
968 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
969
970 tcg_gen_mov_i64(ret, t0);
971 tcg_temp_free_i64(t0);
972 tcg_temp_free_i32(t1);
973 }
974
975 static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
976 {
977 gen_helper_div_i64(ret, arg1, arg2);
978 }
979
980 static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
981 {
982 gen_helper_rem_i64(ret, arg1, arg2);
983 }
984
985 static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
986 {
987 gen_helper_divu_i64(ret, arg1, arg2);
988 }
989
990 static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
991 {
992 gen_helper_remu_i64(ret, arg1, arg2);
993 }
994
995 #else
996
997 static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
998 {
999 if (!TCGV_EQUAL_I64(ret, arg))
1000 tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
1001 }
1002
1003 static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
1004 {
1005 tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg);
1006 }
1007
1008 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1009 tcg_target_long offset)
1010 {
1011 tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
1012 }
1013
1014 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1015 tcg_target_long offset)
1016 {
1017 tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
1018 }
1019
1020 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1021 tcg_target_long offset)
1022 {
1023 tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
1024 }
1025
1026 static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1027 tcg_target_long offset)
1028 {
1029 tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
1030 }
1031
1032 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1033 tcg_target_long offset)
1034 {
1035 tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
1036 }
1037
1038 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1039 tcg_target_long offset)
1040 {
1041 tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
1042 }
1043
1044 static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1045 {
1046 tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
1047 }
1048
1049 static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1050 tcg_target_long offset)
1051 {
1052 tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
1053 }
1054
1055 static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1056 tcg_target_long offset)
1057 {
1058 tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
1059 }
1060
1061 static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1062 tcg_target_long offset)
1063 {
1064 tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
1065 }
1066
1067 static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
1068 {
1069 tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
1070 }
1071
1072 static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1073 {
1074 tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
1075 }
1076
1077 static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1078 {
1079 tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
1080 }
1081
1082 static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1083 {
1084 if (TCGV_EQUAL_I64(arg1, arg2)) {
1085 tcg_gen_mov_i64(ret, arg1);
1086 } else {
1087 tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
1088 }
1089 }
1090
1091 static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1092 {
1093 TCGv_i64 t0;
1094 /* Some cases can be optimized here. */
1095 switch (arg2) {
1096 case 0:
1097 tcg_gen_movi_i64(ret, 0);
1098 return;
1099 case 0xffffffffffffffffull:
1100 tcg_gen_mov_i64(ret, arg1);
1101 return;
1102 case 0xffull:
1103 /* Don't recurse with tcg_gen_ext8u_i64. */
1104 if (TCG_TARGET_HAS_ext8u_i64) {
1105 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
1106 return;
1107 }
1108 break;
1109 case 0xffffu:
1110 if (TCG_TARGET_HAS_ext16u_i64) {
1111 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
1112 return;
1113 }
1114 break;
1115 case 0xffffffffull:
1116 if (TCG_TARGET_HAS_ext32u_i64) {
1117 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
1118 return;
1119 }
1120 break;
1121 }
1122 t0 = tcg_const_i64(arg2);
1123 tcg_gen_and_i64(ret, arg1, t0);
1124 tcg_temp_free_i64(t0);
1125 }
1126
1127 static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1128 {
1129 if (TCGV_EQUAL_I64(arg1, arg2)) {
1130 tcg_gen_mov_i64(ret, arg1);
1131 } else {
1132 tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
1133 }
1134 }
1135
1136 static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1137 {
1138 /* Some cases can be optimized here. */
1139 if (arg2 == -1) {
1140 tcg_gen_movi_i64(ret, -1);
1141 } else if (arg2 == 0) {
1142 tcg_gen_mov_i64(ret, arg1);
1143 } else {
1144 TCGv_i64 t0 = tcg_const_i64(arg2);
1145 tcg_gen_or_i64(ret, arg1, t0);
1146 tcg_temp_free_i64(t0);
1147 }
1148 }
1149
1150 static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1151 {
1152 if (TCGV_EQUAL_I64(arg1, arg2)) {
1153 tcg_gen_movi_i64(ret, 0);
1154 } else {
1155 tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
1156 }
1157 }
1158
1159 static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1160 {
1161 /* Some cases can be optimized here. */
1162 if (arg2 == 0) {
1163 tcg_gen_mov_i64(ret, arg1);
1164 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
1165 /* Don't recurse with tcg_gen_not_i64. */
1166 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
1167 } else {
1168 TCGv_i64 t0 = tcg_const_i64(arg2);
1169 tcg_gen_xor_i64(ret, arg1, t0);
1170 tcg_temp_free_i64(t0);
1171 }
1172 }
1173
1174 static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1175 {
1176 tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
1177 }
1178
1179 static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1180 {
1181 if (arg2 == 0) {
1182 tcg_gen_mov_i64(ret, arg1);
1183 } else {
1184 TCGv_i64 t0 = tcg_const_i64(arg2);
1185 tcg_gen_shl_i64(ret, arg1, t0);
1186 tcg_temp_free_i64(t0);
1187 }
1188 }
1189
1190 static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1191 {
1192 tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
1193 }
1194
1195 static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1196 {
1197 if (arg2 == 0) {
1198 tcg_gen_mov_i64(ret, arg1);
1199 } else {
1200 TCGv_i64 t0 = tcg_const_i64(arg2);
1201 tcg_gen_shr_i64(ret, arg1, t0);
1202 tcg_temp_free_i64(t0);
1203 }
1204 }
1205
1206 static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1207 {
1208 tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
1209 }
1210
1211 static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1212 {
1213 if (arg2 == 0) {
1214 tcg_gen_mov_i64(ret, arg1);
1215 } else {
1216 TCGv_i64 t0 = tcg_const_i64(arg2);
1217 tcg_gen_sar_i64(ret, arg1, t0);
1218 tcg_temp_free_i64(t0);
1219 }
1220 }
1221
1222 static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
1223 TCGv_i64 arg2, int label_index)
1224 {
1225 if (cond == TCG_COND_ALWAYS) {
1226 tcg_gen_br(label_index);
1227 } else if (cond != TCG_COND_NEVER) {
1228 tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label_index);
1229 }
1230 }
1231
1232 static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
1233 TCGv_i64 arg1, TCGv_i64 arg2)
1234 {
1235 if (cond == TCG_COND_ALWAYS) {
1236 tcg_gen_movi_i64(ret, 1);
1237 } else if (cond == TCG_COND_NEVER) {
1238 tcg_gen_movi_i64(ret, 0);
1239 } else {
1240 tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
1241 }
1242 }
1243
1244 static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1245 {
1246 tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
1247 }
1248
1249 static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1250 {
1251 if (TCG_TARGET_HAS_div_i64) {
1252 tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
1253 } else if (TCG_TARGET_HAS_div2_i64) {
1254 TCGv_i64 t0 = tcg_temp_new_i64();
1255 tcg_gen_sari_i64(t0, arg1, 63);
1256 tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
1257 tcg_temp_free_i64(t0);
1258 } else {
1259 gen_helper_div_i64(ret, arg1, arg2);
1260 }
1261 }
1262
1263 static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1264 {
1265 if (TCG_TARGET_HAS_rem_i64) {
1266 tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
1267 } else if (TCG_TARGET_HAS_div_i64) {
1268 TCGv_i64 t0 = tcg_temp_new_i64();
1269 tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
1270 tcg_gen_mul_i64(t0, t0, arg2);
1271 tcg_gen_sub_i64(ret, arg1, t0);
1272 tcg_temp_free_i64(t0);
1273 } else if (TCG_TARGET_HAS_div2_i64) {
1274 TCGv_i64 t0 = tcg_temp_new_i64();
1275 tcg_gen_sari_i64(t0, arg1, 63);
1276 tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
1277 tcg_temp_free_i64(t0);
1278 } else {
1279 gen_helper_rem_i64(ret, arg1, arg2);
1280 }
1281 }
1282
1283 static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1284 {
1285 if (TCG_TARGET_HAS_div_i64) {
1286 tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
1287 } else if (TCG_TARGET_HAS_div2_i64) {
1288 TCGv_i64 t0 = tcg_temp_new_i64();
1289 tcg_gen_movi_i64(t0, 0);
1290 tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
1291 tcg_temp_free_i64(t0);
1292 } else {
1293 gen_helper_divu_i64(ret, arg1, arg2);
1294 }
1295 }
1296
1297 static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1298 {
1299 if (TCG_TARGET_HAS_rem_i64) {
1300 tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
1301 } else if (TCG_TARGET_HAS_div_i64) {
1302 TCGv_i64 t0 = tcg_temp_new_i64();
1303 tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
1304 tcg_gen_mul_i64(t0, t0, arg2);
1305 tcg_gen_sub_i64(ret, arg1, t0);
1306 tcg_temp_free_i64(t0);
1307 } else if (TCG_TARGET_HAS_div2_i64) {
1308 TCGv_i64 t0 = tcg_temp_new_i64();
1309 tcg_gen_movi_i64(t0, 0);
1310 tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
1311 tcg_temp_free_i64(t0);
1312 } else {
1313 gen_helper_remu_i64(ret, arg1, arg2);
1314 }
1315 }
1316 #endif /* TCG_TARGET_REG_BITS == 32 */
1317
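/* The immediate and conditional 64-bit helpers below are shared by both host
   variants and are built on the primitives defined above. */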
1318 static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1319 {
1320 /* some cases can be optimized here */
1321 if (arg2 == 0) {
1322 tcg_gen_mov_i64(ret, arg1);
1323 } else {
1324 TCGv_i64 t0 = tcg_const_i64(arg2);
1325 tcg_gen_add_i64(ret, arg1, t0);
1326 tcg_temp_free_i64(t0);
1327 }
1328 }
1329
1330 static inline void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
1331 {
1332 TCGv_i64 t0 = tcg_const_i64(arg1);
1333 tcg_gen_sub_i64(ret, t0, arg2);
1334 tcg_temp_free_i64(t0);
1335 }
1336
1337 static inline void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1338 {
1339 /* some cases can be optimized here */
1340 if (arg2 == 0) {
1341 tcg_gen_mov_i64(ret, arg1);
1342 } else {
1343 TCGv_i64 t0 = tcg_const_i64(arg2);
1344 tcg_gen_sub_i64(ret, arg1, t0);
1345 tcg_temp_free_i64(t0);
1346 }
1347 }
1348 static inline void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1,
1349 int64_t arg2, int label_index)
1350 {
1351 if (cond == TCG_COND_ALWAYS) {
1352 tcg_gen_br(label_index);
1353 } else if (cond != TCG_COND_NEVER) {
1354 TCGv_i64 t0 = tcg_const_i64(arg2);
1355 tcg_gen_brcond_i64(cond, arg1, t0, label_index);
1356 tcg_temp_free_i64(t0);
1357 }
1358 }
1359
1360 static inline void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1361 TCGv_i64 arg1, int64_t arg2)
1362 {
1363 TCGv_i64 t0 = tcg_const_i64(arg2);
1364 tcg_gen_setcond_i64(cond, ret, arg1, t0);
1365 tcg_temp_free_i64(t0);
1366 }
1367
1368 static inline void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1369 {
1370 TCGv_i64 t0 = tcg_const_i64(arg2);
1371 tcg_gen_mul_i64(ret, arg1, t0);
1372 tcg_temp_free_i64(t0);
1373 }
1374
1375
1376 /***************************************/
1377 /* optional operations */
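/* Each op in this section is emitted directly only when the backend
   advertises it via the corresponding TCG_TARGET_HAS_* flag; otherwise it is
   expanded into an equivalent sequence of mandatory ops. */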
1378
1379 static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
1380 {
1381 if (TCG_TARGET_HAS_ext8s_i32) {
1382 tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
1383 } else {
1384 tcg_gen_shli_i32(ret, arg, 24);
1385 tcg_gen_sari_i32(ret, ret, 24);
1386 }
1387 }
1388
1389 static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
1390 {
1391 if (TCG_TARGET_HAS_ext16s_i32) {
1392 tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
1393 } else {
1394 tcg_gen_shli_i32(ret, arg, 16);
1395 tcg_gen_sari_i32(ret, ret, 16);
1396 }
1397 }
1398
1399 static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
1400 {
1401 if (TCG_TARGET_HAS_ext8u_i32) {
1402 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
1403 } else {
1404 tcg_gen_andi_i32(ret, arg, 0xffu);
1405 }
1406 }
1407
1408 static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
1409 {
1410 if (TCG_TARGET_HAS_ext16u_i32) {
1411 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
1412 } else {
1413 tcg_gen_andi_i32(ret, arg, 0xffffu);
1414 }
1415 }
1416
1417 /* Note: we assume the two high bytes are set to zero */
1418 static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
1419 {
1420 if (TCG_TARGET_HAS_bswap16_i32) {
1421 tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
1422 } else {
1423 TCGv_i32 t0 = tcg_temp_new_i32();
1424
1425 tcg_gen_ext8u_i32(t0, arg);
1426 tcg_gen_shli_i32(t0, t0, 8);
1427 tcg_gen_shri_i32(ret, arg, 8);
1428 tcg_gen_or_i32(ret, ret, t0);
1429 tcg_temp_free_i32(t0);
1430 }
1431 }
1432
1433 static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
1434 {
1435 if (TCG_TARGET_HAS_bswap32_i32) {
1436 tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
1437 } else {
1438 TCGv_i32 t0, t1;
1439 t0 = tcg_temp_new_i32();
1440 t1 = tcg_temp_new_i32();
1441
1442 tcg_gen_shli_i32(t0, arg, 24);
1443
1444 tcg_gen_andi_i32(t1, arg, 0x0000ff00);
1445 tcg_gen_shli_i32(t1, t1, 8);
1446 tcg_gen_or_i32(t0, t0, t1);
1447
1448 tcg_gen_shri_i32(t1, arg, 8);
1449 tcg_gen_andi_i32(t1, t1, 0x0000ff00);
1450 tcg_gen_or_i32(t0, t0, t1);
1451
1452 tcg_gen_shri_i32(t1, arg, 24);
1453 tcg_gen_or_i32(ret, t0, t1);
1454 tcg_temp_free_i32(t0);
1455 tcg_temp_free_i32(t1);
1456 }
1457 }
1458
1459 #if TCG_TARGET_REG_BITS == 32
1460 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1461 {
1462 tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1463 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1464 }
1465
1466 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1467 {
1468 tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1469 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1470 }
1471
1472 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1473 {
1474 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1475 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1476 }
1477
1478 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1479 {
1480 tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1481 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1482 }
1483
1484 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1485 {
1486 tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1487 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1488 }
1489
1490 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1491 {
1492 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1493 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1494 }
1495
1496 static inline void tcg_gen_trunc_shr_i64_i32(TCGv_i32 ret, TCGv_i64 arg,
1497 unsigned int count)
1498 {
1499 tcg_debug_assert(count < 64);
1500 if (count >= 32) {
1501 tcg_gen_shri_i32(ret, TCGV_HIGH(arg), count - 32);
1502 } else if (count == 0) {
1503 tcg_gen_mov_i32(ret, TCGV_LOW(arg));
1504 } else {
1505 TCGv_i64 t = tcg_temp_new_i64();
1506 tcg_gen_shri_i64(t, arg, count);
1507 tcg_gen_mov_i32(ret, TCGV_LOW(t));
1508 tcg_temp_free_i64(t);
1509 }
1510 }
1511
1512 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1513 {
1514 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
1515 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1516 }
1517
1518 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1519 {
1520 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
1521 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1522 }
1523
1524 /* Note: we assume the six high bytes are set to zero */
1525 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
1526 {
1527 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1528 tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1529 }
1530
1531 /* Note: we assume the four high bytes are set to zero */
1532 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
1533 {
1534 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1535 tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1536 }
1537
1538 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1539 {
1540 TCGv_i32 t0, t1;
1541 t0 = tcg_temp_new_i32();
1542 t1 = tcg_temp_new_i32();
1543
1544 tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
1545 tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
1546 tcg_gen_mov_i32(TCGV_LOW(ret), t1);
1547 tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
1548 tcg_temp_free_i32(t0);
1549 tcg_temp_free_i32(t1);
1550 }
1551 #else
1552
1553 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1554 {
1555 if (TCG_TARGET_HAS_ext8s_i64) {
1556 tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
1557 } else {
1558 tcg_gen_shli_i64(ret, arg, 56);
1559 tcg_gen_sari_i64(ret, ret, 56);
1560 }
1561 }
1562
1563 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1564 {
1565 if (TCG_TARGET_HAS_ext16s_i64) {
1566 tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
1567 } else {
1568 tcg_gen_shli_i64(ret, arg, 48);
1569 tcg_gen_sari_i64(ret, ret, 48);
1570 }
1571 }
1572
1573 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1574 {
1575 if (TCG_TARGET_HAS_ext32s_i64) {
1576 tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
1577 } else {
1578 tcg_gen_shli_i64(ret, arg, 32);
1579 tcg_gen_sari_i64(ret, ret, 32);
1580 }
1581 }
1582
1583 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1584 {
1585 if (TCG_TARGET_HAS_ext8u_i64) {
1586 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
1587 } else {
1588 tcg_gen_andi_i64(ret, arg, 0xffu);
1589 }
1590 }
1591
1592 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1593 {
1594 if (TCG_TARGET_HAS_ext16u_i64) {
1595 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
1596 } else {
1597 tcg_gen_andi_i64(ret, arg, 0xffffu);
1598 }
1599 }
1600
1601 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1602 {
1603 if (TCG_TARGET_HAS_ext32u_i64) {
1604 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
1605 } else {
1606 tcg_gen_andi_i64(ret, arg, 0xffffffffu);
1607 }
1608 }
1609
1610 static inline void tcg_gen_trunc_shr_i64_i32(TCGv_i32 ret, TCGv_i64 arg,
1611 unsigned int count)
1612 {
1613 tcg_debug_assert(count < 64);
1614 if (TCG_TARGET_HAS_trunc_shr_i32) {
1615 tcg_gen_op3i_i32(INDEX_op_trunc_shr_i32, ret,
1616 MAKE_TCGV_I32(GET_TCGV_I64(arg)), count);
1617 } else if (count == 0) {
1618 tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
1619 } else {
1620 TCGv_i64 t = tcg_temp_new_i64();
1621 tcg_gen_shri_i64(t, arg, count);
1622 tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
1623 tcg_temp_free_i64(t);
1624 }
1625 }
1626
1627 /* Note: we assume the target supports move between 32 and 64 bit
1628 registers */
1629 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1630 {
1631 tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
1632 }
1633
1634 /* Note: we assume the target supports move between 32 and 64 bit
1635 registers */
1636 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1637 {
1638 tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
1639 }
1640
1641 /* Note: we assume the six high bytes are set to zero */
1642 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
1643 {
1644 if (TCG_TARGET_HAS_bswap16_i64) {
1645 tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
1646 } else {
1647 TCGv_i64 t0 = tcg_temp_new_i64();
1648
1649 tcg_gen_ext8u_i64(t0, arg);
1650 tcg_gen_shli_i64(t0, t0, 8);
1651 tcg_gen_shri_i64(ret, arg, 8);
1652 tcg_gen_or_i64(ret, ret, t0);
1653 tcg_temp_free_i64(t0);
1654 }
1655 }
1656
1657 /* Note: we assume the four high bytes are set to zero */
1658 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
1659 {
1660 if (TCG_TARGET_HAS_bswap32_i64) {
1661 tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
1662 } else {
1663 TCGv_i64 t0, t1;
1664 t0 = tcg_temp_new_i64();
1665 t1 = tcg_temp_new_i64();
1666
1667 tcg_gen_shli_i64(t0, arg, 24);
1668 tcg_gen_ext32u_i64(t0, t0);
1669
1670 tcg_gen_andi_i64(t1, arg, 0x0000ff00);
1671 tcg_gen_shli_i64(t1, t1, 8);
1672 tcg_gen_or_i64(t0, t0, t1);
1673
1674 tcg_gen_shri_i64(t1, arg, 8);
1675 tcg_gen_andi_i64(t1, t1, 0x0000ff00);
1676 tcg_gen_or_i64(t0, t0, t1);
1677
1678 tcg_gen_shri_i64(t1, arg, 24);
1679 tcg_gen_or_i64(ret, t0, t1);
1680 tcg_temp_free_i64(t0);
1681 tcg_temp_free_i64(t1);
1682 }
1683 }
1684
1685 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1686 {
1687 if (TCG_TARGET_HAS_bswap64_i64) {
1688 tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
1689 } else {
1690 TCGv_i64 t0 = tcg_temp_new_i64();
1691 TCGv_i64 t1 = tcg_temp_new_i64();
1692
1693 tcg_gen_shli_i64(t0, arg, 56);
1694
1695 tcg_gen_andi_i64(t1, arg, 0x0000ff00);
1696 tcg_gen_shli_i64(t1, t1, 40);
1697 tcg_gen_or_i64(t0, t0, t1);
1698
1699 tcg_gen_andi_i64(t1, arg, 0x00ff0000);
1700 tcg_gen_shli_i64(t1, t1, 24);
1701 tcg_gen_or_i64(t0, t0, t1);
1702
1703 tcg_gen_andi_i64(t1, arg, 0xff000000);
1704 tcg_gen_shli_i64(t1, t1, 8);
1705 tcg_gen_or_i64(t0, t0, t1);
1706
1707 tcg_gen_shri_i64(t1, arg, 8);
1708 tcg_gen_andi_i64(t1, t1, 0xff000000);
1709 tcg_gen_or_i64(t0, t0, t1);
1710
1711 tcg_gen_shri_i64(t1, arg, 24);
1712 tcg_gen_andi_i64(t1, t1, 0x00ff0000);
1713 tcg_gen_or_i64(t0, t0, t1);
1714
1715 tcg_gen_shri_i64(t1, arg, 40);
1716 tcg_gen_andi_i64(t1, t1, 0x0000ff00);
1717 tcg_gen_or_i64(t0, t0, t1);
1718
1719 tcg_gen_shri_i64(t1, arg, 56);
1720 tcg_gen_or_i64(ret, t0, t1);
1721 tcg_temp_free_i64(t0);
1722 tcg_temp_free_i64(t1);
1723 }
1724 }
1725
1726 #endif
1727
1728 static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
1729 {
1730 if (TCG_TARGET_HAS_neg_i32) {
1731 tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
1732 } else {
1733 TCGv_i32 t0 = tcg_const_i32(0);
1734 tcg_gen_sub_i32(ret, t0, arg);
1735 tcg_temp_free_i32(t0);
1736 }
1737 }
1738
1739 static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
1740 {
1741 if (TCG_TARGET_HAS_neg_i64) {
1742 tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
1743 } else {
1744 TCGv_i64 t0 = tcg_const_i64(0);
1745 tcg_gen_sub_i64(ret, t0, arg);
1746 tcg_temp_free_i64(t0);
1747 }
1748 }
1749
1750 static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
1751 {
1752 if (TCG_TARGET_HAS_not_i32) {
1753 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
1754 } else {
1755 tcg_gen_xori_i32(ret, arg, -1);
1756 }
1757 }
1758
1759 static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1760 {
1761 #if TCG_TARGET_REG_BITS == 64
1762 if (TCG_TARGET_HAS_not_i64) {
1763 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1764 } else {
1765 tcg_gen_xori_i64(ret, arg, -1);
1766 }
1767 #else
1768 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1769 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1770 #endif
1771 }
1772
1773 static inline void tcg_gen_discard_i32(TCGv_i32 arg)
1774 {
1775 tcg_gen_op1_i32(INDEX_op_discard, arg);
1776 }
1777
1778 static inline void tcg_gen_discard_i64(TCGv_i64 arg)
1779 {
1780 #if TCG_TARGET_REG_BITS == 32
1781 tcg_gen_discard_i32(TCGV_LOW(arg));
1782 tcg_gen_discard_i32(TCGV_HIGH(arg));
1783 #else
1784 tcg_gen_op1_i64(INDEX_op_discard, arg);
1785 #endif
1786 }
1787
1788 static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1789 {
1790 if (TCG_TARGET_HAS_andc_i32) {
1791 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
1792 } else {
1793 TCGv_i32 t0 = tcg_temp_new_i32();
1794 tcg_gen_not_i32(t0, arg2);
1795 tcg_gen_and_i32(ret, arg1, t0);
1796 tcg_temp_free_i32(t0);
1797 }
1798 }
1799
1800 static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1801 {
1802 #if TCG_TARGET_REG_BITS == 64
1803 if (TCG_TARGET_HAS_andc_i64) {
1804 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1805 } else {
1806 TCGv_i64 t0 = tcg_temp_new_i64();
1807 tcg_gen_not_i64(t0, arg2);
1808 tcg_gen_and_i64(ret, arg1, t0);
1809 tcg_temp_free_i64(t0);
1810 }
1811 #else
1812 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1813 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1814 #endif
1815 }
1816
1817 static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1818 {
1819 if (TCG_TARGET_HAS_eqv_i32) {
1820 tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
1821 } else {
1822 tcg_gen_xor_i32(ret, arg1, arg2);
1823 tcg_gen_not_i32(ret, ret);
1824 }
1825 }
1826
1827 static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1828 {
1829 #if TCG_TARGET_REG_BITS == 64
1830 if (TCG_TARGET_HAS_eqv_i64) {
1831 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1832 } else {
1833 tcg_gen_xor_i64(ret, arg1, arg2);
1834 tcg_gen_not_i64(ret, ret);
1835 }
1836 #else
1837 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1838 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1839 #endif
1840 }
1841
1842 static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1843 {
1844 if (TCG_TARGET_HAS_nand_i32) {
1845 tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
1846 } else {
1847 tcg_gen_and_i32(ret, arg1, arg2);
1848 tcg_gen_not_i32(ret, ret);
1849 }
1850 }
1851
1852 static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1853 {
1854 #if TCG_TARGET_REG_BITS == 64
1855 if (TCG_TARGET_HAS_nand_i64) {
1856 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1857 } else {
1858 tcg_gen_and_i64(ret, arg1, arg2);
1859 tcg_gen_not_i64(ret, ret);
1860 }
1861 #else
1862 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1863 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1864 #endif
1865 }
1866
1867 static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1868 {
1869 if (TCG_TARGET_HAS_nor_i32) {
1870 tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
1871 } else {
1872 tcg_gen_or_i32(ret, arg1, arg2);
1873 tcg_gen_not_i32(ret, ret);
1874 }
1875 }
1876
1877 static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1878 {
1879 #if TCG_TARGET_REG_BITS == 64
1880 if (TCG_TARGET_HAS_nor_i64) {
1881 tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1882 } else {
1883 tcg_gen_or_i64(ret, arg1, arg2);
1884 tcg_gen_not_i64(ret, ret);
1885 }
1886 #else
1887 tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1888 tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1889 #endif
1890 }
1891
1892 static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1893 {
1894 if (TCG_TARGET_HAS_orc_i32) {
1895 tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
1896 } else {
1897 TCGv_i32 t0 = tcg_temp_new_i32();
1898 tcg_gen_not_i32(t0, arg2);
1899 tcg_gen_or_i32(ret, arg1, t0);
1900 tcg_temp_free_i32(t0);
1901 }
1902 }
1903
1904 static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1905 {
1906 #if TCG_TARGET_REG_BITS == 64
1907 if (TCG_TARGET_HAS_orc_i64) {
1908 tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1909 } else {
1910 TCGv_i64 t0 = tcg_temp_new_i64();
1911 tcg_gen_not_i64(t0, arg2);
1912 tcg_gen_or_i64(ret, arg1, t0);
1913 tcg_temp_free_i64(t0);
1914 }
1915 #else
1916 tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1917 tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1918 #endif
1919 }
1920
1921 static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1922 {
1923 if (TCG_TARGET_HAS_rot_i32) {
1924 tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
1925 } else {
1926 TCGv_i32 t0, t1;
1927
1928 t0 = tcg_temp_new_i32();
1929 t1 = tcg_temp_new_i32();
1930 tcg_gen_shl_i32(t0, arg1, arg2);
1931 tcg_gen_subfi_i32(t1, 32, arg2);
1932 tcg_gen_shr_i32(t1, arg1, t1);
1933 tcg_gen_or_i32(ret, t0, t1);
1934 tcg_temp_free_i32(t0);
1935 tcg_temp_free_i32(t1);
1936 }
1937 }
1938
1939 static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1940 {
1941 if (TCG_TARGET_HAS_rot_i64) {
1942 tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
1943 } else {
1944 TCGv_i64 t0, t1;
1945 t0 = tcg_temp_new_i64();
1946 t1 = tcg_temp_new_i64();
1947 tcg_gen_shl_i64(t0, arg1, arg2);
1948 tcg_gen_subfi_i64(t1, 64, arg2);
1949 tcg_gen_shr_i64(t1, arg1, t1);
1950 tcg_gen_or_i64(ret, t0, t1);
1951 tcg_temp_free_i64(t0);
1952 tcg_temp_free_i64(t1);
1953 }
1954 }
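
/* Usage sketch (hypothetical helper, not part of the original header):
 * a front end translating a "rotate left by register" guest instruction
 * would typically mask the count to the operand width first, since most
 * ISAs only honour the low log2(width) bits of the count. */
static inline void gen_rol32_example(TCGv_i32 dst, TCGv_i32 src, TCGv_i32 count)
{
    TCGv_i32 t = tcg_temp_new_i32();
    tcg_gen_andi_i32(t, count, 31);     /* count mod 32 */
    tcg_gen_rotl_i32(dst, src, t);
    tcg_temp_free_i32(t);
}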
1955
1956 static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
1957 {
1958 /* some cases can be optimized here */
1959 if (arg2 == 0) {
1960 tcg_gen_mov_i32(ret, arg1);
1961 } else if (TCG_TARGET_HAS_rot_i32) {
1962 TCGv_i32 t0 = tcg_const_i32(arg2);
1963 tcg_gen_rotl_i32(ret, arg1, t0);
1964 tcg_temp_free_i32(t0);
1965 } else {
1966 TCGv_i32 t0, t1;
1967 t0 = tcg_temp_new_i32();
1968 t1 = tcg_temp_new_i32();
1969 tcg_gen_shli_i32(t0, arg1, arg2);
1970 tcg_gen_shri_i32(t1, arg1, 32 - arg2);
1971 tcg_gen_or_i32(ret, t0, t1);
1972 tcg_temp_free_i32(t0);
1973 tcg_temp_free_i32(t1);
1974 }
1975 }
1976
1977 static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1978 {
1979 /* some cases can be optimized here */
1980 if (arg2 == 0) {
1981 tcg_gen_mov_i64(ret, arg1);
1982 } else if (TCG_TARGET_HAS_rot_i64) {
1983 TCGv_i64 t0 = tcg_const_i64(arg2);
1984 tcg_gen_rotl_i64(ret, arg1, t0);
1985 tcg_temp_free_i64(t0);
1986 } else {
1987 TCGv_i64 t0, t1;
1988 t0 = tcg_temp_new_i64();
1989 t1 = tcg_temp_new_i64();
1990 tcg_gen_shli_i64(t0, arg1, arg2);
1991 tcg_gen_shri_i64(t1, arg1, 64 - arg2);
1992 tcg_gen_or_i64(ret, t0, t1);
1993 tcg_temp_free_i64(t0);
1994 tcg_temp_free_i64(t1);
1995 }
1996 }
1997
1998 static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1999 {
2000 if (TCG_TARGET_HAS_rot_i32) {
2001 tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
2002 } else {
2003 TCGv_i32 t0, t1;
2004
2005 t0 = tcg_temp_new_i32();
2006 t1 = tcg_temp_new_i32();
2007 tcg_gen_shr_i32(t0, arg1, arg2);
2008 tcg_gen_subfi_i32(t1, 32, arg2);
2009 tcg_gen_shl_i32(t1, arg1, t1);
2010 tcg_gen_or_i32(ret, t0, t1);
2011 tcg_temp_free_i32(t0);
2012 tcg_temp_free_i32(t1);
2013 }
2014 }
2015
2016 static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2017 {
2018 if (TCG_TARGET_HAS_rot_i64) {
2019 tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2020 } else {
2021 TCGv_i64 t0, t1;
2022 t0 = tcg_temp_new_i64();
2023 t1 = tcg_temp_new_i64();
2024 tcg_gen_shr_i64(t0, arg1, arg2);
2025 tcg_gen_subfi_i64(t1, 64, arg2);
2026 tcg_gen_shl_i64(t1, arg1, t1);
2027 tcg_gen_or_i64(ret, t0, t1);
2028 tcg_temp_free_i64(t0);
2029 tcg_temp_free_i64(t1);
2030 }
2031 }
2032
2033 static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
2034 {
2035 /* some cases can be optimized here */
2036 if (arg2 == 0) {
2037 tcg_gen_mov_i32(ret, arg1);
2038 } else {
2039 tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
2040 }
2041 }
2042
2043 static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2044 {
2045 /* some cases can be optimized here */
2046 if (arg2 == 0) {
2047 tcg_gen_mov_i64(ret, arg1);
2048 } else {
2049 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2050 }
2051 }
2052
2053 static inline void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1,
2054 TCGv_i32 arg2, unsigned int ofs,
2055 unsigned int len)
2056 {
2057 uint32_t mask;
2058 TCGv_i32 t1;
2059
2060 tcg_debug_assert(ofs < 32);
2061 tcg_debug_assert(len <= 32);
2062 tcg_debug_assert(ofs + len <= 32);
2063
2064 if (ofs == 0 && len == 32) {
2065 tcg_gen_mov_i32(ret, arg2);
2066 return;
2067 }
2068 if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
2069 tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
2070 return;
2071 }
2072
2073 mask = (1u << len) - 1;
2074 t1 = tcg_temp_new_i32();
2075
2076 if (ofs + len < 32) {
2077 tcg_gen_andi_i32(t1, arg2, mask);
2078 tcg_gen_shli_i32(t1, t1, ofs);
2079 } else {
2080 tcg_gen_shli_i32(t1, arg2, ofs);
2081 }
2082 tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
2083 tcg_gen_or_i32(ret, ret, t1);
2084
2085 tcg_temp_free_i32(t1);
2086 }
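
/* Usage sketch (hypothetical helper, not part of the original header):
 * deposit replaces a bitfield of arg1 with the low bits of arg2.  A typical
 * front-end use is writing an 8-bit subregister into a wider register,
 * e.g. storing AL into EAX while preserving the upper 24 bits. */
static inline void gen_store_low_byte_example(TCGv_i32 reg, TCGv_i32 val)
{
    tcg_gen_deposit_i32(reg, reg, val, 0, 8);   /* reg[7:0] = val[7:0] */
}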
2087
2088 static inline void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1,
2089 TCGv_i64 arg2, unsigned int ofs,
2090 unsigned int len)
2091 {
2092 uint64_t mask;
2093 TCGv_i64 t1;
2094
2095 tcg_debug_assert(ofs < 64);
2096 tcg_debug_assert(len <= 64);
2097 tcg_debug_assert(ofs + len <= 64);
2098
2099 if (ofs == 0 && len == 64) {
2100 tcg_gen_mov_i64(ret, arg2);
2101 return;
2102 }
2103 if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2104 tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2105 return;
2106 }
2107
2108 #if TCG_TARGET_REG_BITS == 32
2109 if (ofs >= 32) {
2110 tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2111 TCGV_LOW(arg2), ofs - 32, len);
2112 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2113 return;
2114 }
2115 if (ofs + len <= 32) {
2116 tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2117 TCGV_LOW(arg2), ofs, len);
2118 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2119 return;
2120 }
2121 #endif
2122
2123 mask = (1ull << len) - 1;
2124 t1 = tcg_temp_new_i64();
2125
2126 if (ofs + len < 64) {
2127 tcg_gen_andi_i64(t1, arg2, mask);
2128 tcg_gen_shli_i64(t1, t1, ofs);
2129 } else {
2130 tcg_gen_shli_i64(t1, arg2, ofs);
2131 }
2132 tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2133 tcg_gen_or_i64(ret, ret, t1);
2134
2135 tcg_temp_free_i64(t1);
2136 }
2137
2138 static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low,
2139 TCGv_i32 high)
2140 {
2141 #if TCG_TARGET_REG_BITS == 32
2142 tcg_gen_mov_i32(TCGV_LOW(dest), low);
2143 tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2144 #else
2145 TCGv_i64 tmp = tcg_temp_new_i64();
2146 /* These extensions are only needed for type correctness.
2147 We may be able to do better given target-specific information. */
2148 tcg_gen_extu_i32_i64(tmp, high);
2149 tcg_gen_extu_i32_i64(dest, low);
2150 /* If deposit is available, use it. Otherwise use the extra
2151 knowledge that we have of the zero-extensions above. */
2152 if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2153 tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2154 } else {
2155 tcg_gen_shli_i64(tmp, tmp, 32);
2156 tcg_gen_or_i64(dest, dest, tmp);
2157 }
2158 tcg_temp_free_i64(tmp);
2159 #endif
2160 }
2161
2162 static inline void tcg_gen_concat32_i64(TCGv_i64 dest, TCGv_i64 low,
2163 TCGv_i64 high)
2164 {
2165 tcg_gen_deposit_i64(dest, low, high, 32, 32);
2166 }
2167
2168 static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2169 {
2170 tcg_gen_trunc_shr_i64_i32(ret, arg, 0);
2171 }
2172
2173 static inline void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2174 {
2175 tcg_gen_trunc_shr_i64_i32(lo, arg, 0);
2176 tcg_gen_trunc_shr_i64_i32(hi, arg, 32);
2177 }
2178
2179 static inline void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
2180 {
2181 tcg_gen_ext32u_i64(lo, arg);
2182 tcg_gen_shri_i64(hi, arg, 32);
2183 }
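
/* Round-trip sketch (hypothetical helper, not part of the original header):
 * concat packs two 32-bit values into one 64-bit value as (hi << 32) | lo,
 * and extr is its inverse, so the pair below leaves lo and hi unchanged. */
static inline void gen_pack_unpack_example(TCGv_i32 lo, TCGv_i32 hi)
{
    TCGv_i64 t = tcg_temp_new_i64();
    tcg_gen_concat_i32_i64(t, lo, hi);   /* t = ((uint64_t)hi << 32) | lo */
    tcg_gen_extr_i64_i32(lo, hi, t);     /* split t back into lo and hi */
    tcg_temp_free_i64(t);
}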
2184
2185 static inline void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret,
2186 TCGv_i32 c1, TCGv_i32 c2,
2187 TCGv_i32 v1, TCGv_i32 v2)
2188 {
2189 if (TCG_TARGET_HAS_movcond_i32) {
2190 tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
2191 } else {
2192 TCGv_i32 t0 = tcg_temp_new_i32();
2193 TCGv_i32 t1 = tcg_temp_new_i32();
2194 tcg_gen_setcond_i32(cond, t0, c1, c2);
2195 tcg_gen_neg_i32(t0, t0);
2196 tcg_gen_and_i32(t1, v1, t0);
2197 tcg_gen_andc_i32(ret, v2, t0);
2198 tcg_gen_or_i32(ret, ret, t1);
2199 tcg_temp_free_i32(t0);
2200 tcg_temp_free_i32(t1);
2201 }
2202 }
2203
2204 static inline void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret,
2205 TCGv_i64 c1, TCGv_i64 c2,
2206 TCGv_i64 v1, TCGv_i64 v2)
2207 {
2208 #if TCG_TARGET_REG_BITS == 32
2209 TCGv_i32 t0 = tcg_temp_new_i32();
2210 TCGv_i32 t1 = tcg_temp_new_i32();
2211 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
2212 TCGV_LOW(c1), TCGV_HIGH(c1),
2213 TCGV_LOW(c2), TCGV_HIGH(c2), cond);
2214
2215 if (TCG_TARGET_HAS_movcond_i32) {
2216 tcg_gen_movi_i32(t1, 0);
2217 tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
2218 TCGV_LOW(v1), TCGV_LOW(v2));
2219 tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
2220 TCGV_HIGH(v1), TCGV_HIGH(v2));
2221 } else {
2222 tcg_gen_neg_i32(t0, t0);
2223
2224 tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
2225 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
2226 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);
2227
2228 tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
2229 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
2230 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
2231 }
2232 tcg_temp_free_i32(t0);
2233 tcg_temp_free_i32(t1);
2234 #else
2235 if (TCG_TARGET_HAS_movcond_i64) {
2236 tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
2237 } else {
2238 TCGv_i64 t0 = tcg_temp_new_i64();
2239 TCGv_i64 t1 = tcg_temp_new_i64();
2240 tcg_gen_setcond_i64(cond, t0, c1, c2);
2241 tcg_gen_neg_i64(t0, t0);
2242 tcg_gen_and_i64(t1, v1, t0);
2243 tcg_gen_andc_i64(ret, v2, t0);
2244 tcg_gen_or_i64(ret, ret, t1);
2245 tcg_temp_free_i64(t0);
2246 tcg_temp_free_i64(t1);
2247 }
2248 #endif
2249 }
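
/* Usage sketch (hypothetical helper, not part of the original header):
 * movcond selects between v1 and v2 based on cond(c1, c2), so an unsigned
 * maximum takes a single op: ret = (a > b) ? a : b. */
static inline void gen_umax32_example(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_GTU, ret, a, b, a, b);
}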
2250
2251 static inline void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
2252 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
2253 {
2254 if (TCG_TARGET_HAS_add2_i32) {
2255 tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
2256 /* Allow the optimizer room to replace add2 with two moves. */
2257 tcg_gen_op0(INDEX_op_nop);
2258 } else {
2259 TCGv_i64 t0 = tcg_temp_new_i64();
2260 TCGv_i64 t1 = tcg_temp_new_i64();
2261 tcg_gen_concat_i32_i64(t0, al, ah);
2262 tcg_gen_concat_i32_i64(t1, bl, bh);
2263 tcg_gen_add_i64(t0, t0, t1);
2264 tcg_gen_extr_i64_i32(rl, rh, t0);
2265 tcg_temp_free_i64(t0);
2266 tcg_temp_free_i64(t1);
2267 }
2268 }
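
/* Usage sketch (hypothetical helper, not part of the original header):
 * add2 performs a double-word add with carry propagation, e.g. adding a
 * 64-bit constant to a guest value kept as a lo/hi pair of 32-bit regs. */
static inline void gen_add64_to_pair_example(TCGv_i32 lo, TCGv_i32 hi,
                                             uint64_t c)
{
    TCGv_i32 cl = tcg_const_i32((uint32_t)c);
    TCGv_i32 ch = tcg_const_i32((uint32_t)(c >> 32));
    tcg_gen_add2_i32(lo, hi, lo, hi, cl, ch);
    tcg_temp_free_i32(cl);
    tcg_temp_free_i32(ch);
}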
2269
2270 static inline void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
2271 TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
2272 {
2273 if (TCG_TARGET_HAS_sub2_i32) {
2274 tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
2275 /* Allow the optimizer room to replace sub2 with two moves. */
2276 tcg_gen_op0(INDEX_op_nop);
2277 } else {
2278 TCGv_i64 t0 = tcg_temp_new_i64();
2279 TCGv_i64 t1 = tcg_temp_new_i64();
2280 tcg_gen_concat_i32_i64(t0, al, ah);
2281 tcg_gen_concat_i32_i64(t1, bl, bh);
2282 tcg_gen_sub_i64(t0, t0, t1);
2283 tcg_gen_extr_i64_i32(rl, rh, t0);
2284 tcg_temp_free_i64(t0);
2285 tcg_temp_free_i64(t1);
2286 }
2287 }
2288
2289 static inline void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh,
2290 TCGv_i32 arg1, TCGv_i32 arg2)
2291 {
2292 if (TCG_TARGET_HAS_mulu2_i32) {
2293 tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
2294 /* Allow the optimizer room to replace mulu2 with two moves. */
2295 tcg_gen_op0(INDEX_op_nop);
2296 } else if (TCG_TARGET_HAS_muluh_i32) {
2297 TCGv_i32 t = tcg_temp_new_i32();
2298 tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
2299 tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
2300 tcg_gen_mov_i32(rl, t);
2301 tcg_temp_free_i32(t);
2302 } else {
2303 TCGv_i64 t0 = tcg_temp_new_i64();
2304 TCGv_i64 t1 = tcg_temp_new_i64();
2305 tcg_gen_extu_i32_i64(t0, arg1);
2306 tcg_gen_extu_i32_i64(t1, arg2);
2307 tcg_gen_mul_i64(t0, t0, t1);
2308 tcg_gen_extr_i64_i32(rl, rh, t0);
2309 tcg_temp_free_i64(t0);
2310 tcg_temp_free_i64(t1);
2311 }
2312 }
2313
2314 static inline void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh,
2315 TCGv_i32 arg1, TCGv_i32 arg2)
2316 {
2317 if (TCG_TARGET_HAS_muls2_i32) {
2318 tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
2319 /* Allow the optimizer room to replace muls2 with two moves. */
2320 tcg_gen_op0(INDEX_op_nop);
2321 } else if (TCG_TARGET_HAS_mulsh_i32) {
2322 TCGv_i32 t = tcg_temp_new_i32();
2323 tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
2324 tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
2325 tcg_gen_mov_i32(rl, t);
2326 tcg_temp_free_i32(t);
2327 } else if (TCG_TARGET_REG_BITS == 32) {
2328 TCGv_i32 t0 = tcg_temp_new_i32();
2329 TCGv_i32 t1 = tcg_temp_new_i32();
2330 TCGv_i32 t2 = tcg_temp_new_i32();
2331 TCGv_i32 t3 = tcg_temp_new_i32();
2332 tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
2333 /* Adjust for negative inputs. */
2334 tcg_gen_sari_i32(t2, arg1, 31);
2335 tcg_gen_sari_i32(t3, arg2, 31);
2336 tcg_gen_and_i32(t2, t2, arg2);
2337 tcg_gen_and_i32(t3, t3, arg1);
2338 tcg_gen_sub_i32(rh, t1, t2);
2339 tcg_gen_sub_i32(rh, rh, t3);
2340 tcg_gen_mov_i32(rl, t0);
2341 tcg_temp_free_i32(t0);
2342 tcg_temp_free_i32(t1);
2343 tcg_temp_free_i32(t2);
2344 tcg_temp_free_i32(t3);
2345 } else {
2346 TCGv_i64 t0 = tcg_temp_new_i64();
2347 TCGv_i64 t1 = tcg_temp_new_i64();
2348 tcg_gen_ext_i32_i64(t0, arg1);
2349 tcg_gen_ext_i32_i64(t1, arg2);
2350 tcg_gen_mul_i64(t0, t0, t1);
2351 tcg_gen_extr_i64_i32(rl, rh, t0);
2352 tcg_temp_free_i64(t0);
2353 tcg_temp_free_i64(t1);
2354 }
2355 }
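
/* Note (illustrative sketch, not from the original header): the "adjust for
 * negative inputs" step above relies on the two's-complement identity, with
 * s_a = (arg1 < 0), s_b = (arg2 < 0) and the operands read as unsigned:
 *
 *   (int64_t)arg1 * (int64_t)arg2 == (uint64_t)arg1 * arg2
 *                                    - ((uint64_t)(s_a ? arg2 : 0) << 32)
 *                                    - ((uint64_t)(s_b ? arg1 : 0) << 32)
 *                                                               (mod 2^64)
 *
 * so the signed high word is the unsigned high word minus the two correction
 * terms, and sari(x, 31) & y computes exactly (x < 0 ? y : 0). */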
2356
2357 static inline void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2358 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2359 {
2360 if (TCG_TARGET_HAS_add2_i64) {
2361 tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2362 /* Allow the optimizer room to replace add2 with two moves. */
2363 tcg_gen_op0(INDEX_op_nop);
2364 } else {
2365 TCGv_i64 t0 = tcg_temp_new_i64();
2366 TCGv_i64 t1 = tcg_temp_new_i64();
2367 tcg_gen_add_i64(t0, al, bl);
2368 tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2369 tcg_gen_add_i64(rh, ah, bh);
2370 tcg_gen_add_i64(rh, rh, t1);
2371 tcg_gen_mov_i64(rl, t0);
2372 tcg_temp_free_i64(t0);
2373 tcg_temp_free_i64(t1);
2374 }
2375 }
2376
2377 static inline void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2378 TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2379 {
2380 if (TCG_TARGET_HAS_sub2_i64) {
2381 tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2382 /* Allow the optimizer room to replace sub2 with two moves. */
2383 tcg_gen_op0(INDEX_op_nop);
2384 } else {
2385 TCGv_i64 t0 = tcg_temp_new_i64();
2386 TCGv_i64 t1 = tcg_temp_new_i64();
2387 tcg_gen_sub_i64(t0, al, bl);
2388 tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2389 tcg_gen_sub_i64(rh, ah, bh);
2390 tcg_gen_sub_i64(rh, rh, t1);
2391 tcg_gen_mov_i64(rl, t0);
2392 tcg_temp_free_i64(t0);
2393 tcg_temp_free_i64(t1);
2394 }
2395 }
2396
2397 static inline void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh,
2398 TCGv_i64 arg1, TCGv_i64 arg2)
2399 {
2400 if (TCG_TARGET_HAS_mulu2_i64) {
2401 tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2402 /* Allow the optimizer room to replace mulu2 with two moves. */
2403 tcg_gen_op0(INDEX_op_nop);
2404 } else if (TCG_TARGET_HAS_muluh_i64) {
2405 TCGv_i64 t = tcg_temp_new_i64();
2406 tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2407 tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2408 tcg_gen_mov_i64(rl, t);
2409 tcg_temp_free_i64(t);
2410 } else {
2411 TCGv_i64 t0 = tcg_temp_new_i64();
2412 tcg_gen_mul_i64(t0, arg1, arg2);
2413 gen_helper_muluh_i64(rh, arg1, arg2);
2414 tcg_gen_mov_i64(rl, t0);
2415 tcg_temp_free_i64(t0);
2416 }
2417 }
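
/* Usage sketch (hypothetical helper, not part of the original header):
 * the high half of a full 64x64->128-bit unsigned multiply, as a front end
 * might need for a guest UMULH-style instruction. */
static inline void gen_umulh64_example(TCGv_i64 hi, TCGv_i64 a, TCGv_i64 b)
{
    TCGv_i64 lo = tcg_temp_new_i64();
    tcg_gen_mulu2_i64(lo, hi, a, b);    /* hi:lo = a * b, unsigned */
    tcg_temp_free_i64(lo);
}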
2418
2419 static inline void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh,
2420 TCGv_i64 arg1, TCGv_i64 arg2)
2421 {
2422 if (TCG_TARGET_HAS_muls2_i64) {
2423 tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
2424 /* Allow the optimizer room to replace muls2 with two moves. */
2425 tcg_gen_op0(INDEX_op_nop);
2426 } else if (TCG_TARGET_HAS_mulsh_i64) {
2427 TCGv_i64 t = tcg_temp_new_i64();
2428 tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2429 tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
2430 tcg_gen_mov_i64(rl, t);
2431 tcg_temp_free_i64(t);
2432 } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
2433 TCGv_i64 t0 = tcg_temp_new_i64();
2434 TCGv_i64 t1 = tcg_temp_new_i64();
2435 TCGv_i64 t2 = tcg_temp_new_i64();
2436 TCGv_i64 t3 = tcg_temp_new_i64();
2437 tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2438 /* Adjust for negative inputs. */
2439 tcg_gen_sari_i64(t2, arg1, 63);
2440 tcg_gen_sari_i64(t3, arg2, 63);
2441 tcg_gen_and_i64(t2, t2, arg2);
2442 tcg_gen_and_i64(t3, t3, arg1);
2443 tcg_gen_sub_i64(rh, t1, t2);
2444 tcg_gen_sub_i64(rh, rh, t3);
2445 tcg_gen_mov_i64(rl, t0);
2446 tcg_temp_free_i64(t0);
2447 tcg_temp_free_i64(t1);
2448 tcg_temp_free_i64(t2);
2449 tcg_temp_free_i64(t3);
2450 } else {
2451 TCGv_i64 t0 = tcg_temp_new_i64();
2452 tcg_gen_mul_i64(t0, arg1, arg2);
2453 gen_helper_mulsh_i64(rh, arg1, arg2);
2454 tcg_gen_mov_i64(rl, t0);
2455 tcg_temp_free_i64(t0);
2456 }
2457 }
2458
2459 /***************************************/
2460 /* QEMU-specific operations. Their type depends on the QEMU CPU
2461 type. */
2462 #ifndef TARGET_LONG_BITS
2463 #error must include QEMU headers
2464 #endif
2465
2466 #if TARGET_LONG_BITS == 32
2467 #define TCGv TCGv_i32
2468 #define tcg_temp_new() tcg_temp_new_i32()
2469 #define tcg_global_reg_new tcg_global_reg_new_i32
2470 #define tcg_global_mem_new tcg_global_mem_new_i32
2471 #define tcg_temp_local_new() tcg_temp_local_new_i32()
2472 #define tcg_temp_free tcg_temp_free_i32
2473 #define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
2474 #define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I32(x)
2475 #define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
2476 #define tcg_add_param_tl tcg_add_param_i32
2477 #define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i32
2478 #define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i32
2479 #else
2480 #define TCGv TCGv_i64
2481 #define tcg_temp_new() tcg_temp_new_i64()
2482 #define tcg_global_reg_new tcg_global_reg_new_i64
2483 #define tcg_global_mem_new tcg_global_mem_new_i64
2484 #define tcg_temp_local_new() tcg_temp_local_new_i64()
2485 #define tcg_temp_free tcg_temp_free_i64
2486 #define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
2487 #define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I64(x)
2488 #define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
2489 #define tcg_add_param_tl tcg_add_param_i64
2490 #define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i64
2491 #define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i64
2492 #endif
2493
2494 /* debug info: write the PC of the corresponding QEMU CPU instruction */
2495 static inline void tcg_gen_debug_insn_start(uint64_t pc)
2496 {
2497 /* XXX: must really use a 32 bit size for TCGArg in all cases */
2498 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
2499 tcg_gen_op2ii(INDEX_op_debug_insn_start,
2500 (uint32_t)(pc), (uint32_t)(pc >> 32));
2501 #else
2502 tcg_gen_op1i(INDEX_op_debug_insn_start, pc);
2503 #endif
2504 }
2505
2506 static inline void tcg_gen_exit_tb(uintptr_t val)
2507 {
2508 tcg_gen_op1i(INDEX_op_exit_tb, val);
2509 }
2510
2511 static inline void tcg_gen_goto_tb(unsigned idx)
2512 {
2513 /* We only support two chained exits. */
2514 tcg_debug_assert(idx <= 1);
2515 #ifdef CONFIG_DEBUG_TCG
2516 /* Verify that we haven't seen this numbered exit before. */
2517 tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
2518 tcg_ctx.goto_tb_issue_mask |= 1 << idx;
2519 #endif
2520 tcg_gen_op1i(INDEX_op_goto_tb, idx);
2521 }
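
/* Usage sketch (hypothetical, not part of the original header): the usual
 * way a front end chains to the next TB.  "tb" stands for the translator's
 * TranslationBlock pointer; encoding the exit index into the low bits of
 * the exit_tb argument is the conventional pairing with goto_tb. */
static inline void gen_goto_tb_example(void *tb, unsigned idx)
{
    tcg_gen_goto_tb(idx);
    /* ...the front end stores the next guest PC into the CPU state here... */
    tcg_gen_exit_tb((uintptr_t)tb + idx);
}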
2522
2523
2524 void tcg_gen_qemu_ld_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
2525 void tcg_gen_qemu_st_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
2526 void tcg_gen_qemu_ld_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);
2527 void tcg_gen_qemu_st_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);
2528
2529 static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
2530 {
2531 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_UB);
2532 }
2533
2534 static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
2535 {
2536 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_SB);
2537 }
2538
2539 static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
2540 {
2541 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TEUW);
2542 }
2543
2544 static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
2545 {
2546 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TESW);
2547 }
2548
2549 static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
2550 {
2551 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TEUL);
2552 }
2553
2554 static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
2555 {
2556 tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TESL);
2557 }
2558
2559 static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
2560 {
2561 tcg_gen_qemu_ld_i64(ret, addr, mem_index, MO_TEQ);
2562 }
2563
2564 static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
2565 {
2566 tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_UB);
2567 }
2568
2569 static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
2570 {
2571 tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_TEUW);
2572 }
2573
2574 static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
2575 {
2576 tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_TEUL);
2577 }
2578
2579 static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
2580 {
2581 tcg_gen_qemu_st_i64(arg, addr, mem_index, MO_TEQ);
2582 }
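
/* Usage sketch (hypothetical helper, not part of the original header):
 * the wrappers above bundle access size, signedness and target endianness
 * into a TCGMemOp, so a sign-extending halfword load followed by a byte
 * store of the same value looks like this in a front end. */
static inline void gen_ld16s_st8_example(TCGv val, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld16s(val, addr, mem_index);
    tcg_gen_qemu_st8(val, addr, mem_index);
}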
2583
2584 #if TARGET_LONG_BITS == 64
2585 #define tcg_gen_movi_tl tcg_gen_movi_i64
2586 #define tcg_gen_mov_tl tcg_gen_mov_i64
2587 #define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
2588 #define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
2589 #define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
2590 #define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
2591 #define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
2592 #define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
2593 #define tcg_gen_ld_tl tcg_gen_ld_i64
2594 #define tcg_gen_st8_tl tcg_gen_st8_i64
2595 #define tcg_gen_st16_tl tcg_gen_st16_i64
2596 #define tcg_gen_st32_tl tcg_gen_st32_i64
2597 #define tcg_gen_st_tl tcg_gen_st_i64
2598 #define tcg_gen_add_tl tcg_gen_add_i64
2599 #define tcg_gen_addi_tl tcg_gen_addi_i64
2600 #define tcg_gen_sub_tl tcg_gen_sub_i64
2601 #define tcg_gen_neg_tl tcg_gen_neg_i64
2602 #define tcg_gen_subfi_tl tcg_gen_subfi_i64
2603 #define tcg_gen_subi_tl tcg_gen_subi_i64
2604 #define tcg_gen_and_tl tcg_gen_and_i64
2605 #define tcg_gen_andi_tl tcg_gen_andi_i64
2606 #define tcg_gen_or_tl tcg_gen_or_i64
2607 #define tcg_gen_ori_tl tcg_gen_ori_i64
2608 #define tcg_gen_xor_tl tcg_gen_xor_i64
2609 #define tcg_gen_xori_tl tcg_gen_xori_i64
2610 #define tcg_gen_not_tl tcg_gen_not_i64
2611 #define tcg_gen_shl_tl tcg_gen_shl_i64
2612 #define tcg_gen_shli_tl tcg_gen_shli_i64
2613 #define tcg_gen_shr_tl tcg_gen_shr_i64
2614 #define tcg_gen_shri_tl tcg_gen_shri_i64
2615 #define tcg_gen_sar_tl tcg_gen_sar_i64
2616 #define tcg_gen_sari_tl tcg_gen_sari_i64
2617 #define tcg_gen_brcond_tl tcg_gen_brcond_i64
2618 #define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
2619 #define tcg_gen_setcond_tl tcg_gen_setcond_i64
2620 #define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
2621 #define tcg_gen_mul_tl tcg_gen_mul_i64
2622 #define tcg_gen_muli_tl tcg_gen_muli_i64
2623 #define tcg_gen_div_tl tcg_gen_div_i64
2624 #define tcg_gen_rem_tl tcg_gen_rem_i64
2625 #define tcg_gen_divu_tl tcg_gen_divu_i64
2626 #define tcg_gen_remu_tl tcg_gen_remu_i64
2627 #define tcg_gen_discard_tl tcg_gen_discard_i64
2628 #define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
2629 #define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
2630 #define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
2631 #define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
2632 #define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
2633 #define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
2634 #define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
2635 #define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
2636 #define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
2637 #define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
2638 #define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
2639 #define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
2640 #define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
2641 #define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
2642 #define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
2643 #define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
2644 #define tcg_gen_extr_i64_tl tcg_gen_extr32_i64
2645 #define tcg_gen_andc_tl tcg_gen_andc_i64
2646 #define tcg_gen_eqv_tl tcg_gen_eqv_i64
2647 #define tcg_gen_nand_tl tcg_gen_nand_i64
2648 #define tcg_gen_nor_tl tcg_gen_nor_i64
2649 #define tcg_gen_orc_tl tcg_gen_orc_i64
2650 #define tcg_gen_rotl_tl tcg_gen_rotl_i64
2651 #define tcg_gen_rotli_tl tcg_gen_rotli_i64
2652 #define tcg_gen_rotr_tl tcg_gen_rotr_i64
2653 #define tcg_gen_rotri_tl tcg_gen_rotri_i64
2654 #define tcg_gen_deposit_tl tcg_gen_deposit_i64
2655 #define tcg_const_tl tcg_const_i64
2656 #define tcg_const_local_tl tcg_const_local_i64
2657 #define tcg_gen_movcond_tl tcg_gen_movcond_i64
2658 #define tcg_gen_add2_tl tcg_gen_add2_i64
2659 #define tcg_gen_sub2_tl tcg_gen_sub2_i64
2660 #define tcg_gen_mulu2_tl tcg_gen_mulu2_i64
2661 #define tcg_gen_muls2_tl tcg_gen_muls2_i64
2662 #else
2663 #define tcg_gen_movi_tl tcg_gen_movi_i32
2664 #define tcg_gen_mov_tl tcg_gen_mov_i32
2665 #define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
2666 #define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
2667 #define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
2668 #define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
2669 #define tcg_gen_ld32u_tl tcg_gen_ld_i32
2670 #define tcg_gen_ld32s_tl tcg_gen_ld_i32
2671 #define tcg_gen_ld_tl tcg_gen_ld_i32
2672 #define tcg_gen_st8_tl tcg_gen_st8_i32
2673 #define tcg_gen_st16_tl tcg_gen_st16_i32
2674 #define tcg_gen_st32_tl tcg_gen_st_i32
2675 #define tcg_gen_st_tl tcg_gen_st_i32
2676 #define tcg_gen_add_tl tcg_gen_add_i32
2677 #define tcg_gen_addi_tl tcg_gen_addi_i32
2678 #define tcg_gen_sub_tl tcg_gen_sub_i32
2679 #define tcg_gen_neg_tl tcg_gen_neg_i32
2680 #define tcg_gen_subfi_tl tcg_gen_subfi_i32
2681 #define tcg_gen_subi_tl tcg_gen_subi_i32
2682 #define tcg_gen_and_tl tcg_gen_and_i32
2683 #define tcg_gen_andi_tl tcg_gen_andi_i32
2684 #define tcg_gen_or_tl tcg_gen_or_i32
2685 #define tcg_gen_ori_tl tcg_gen_ori_i32
2686 #define tcg_gen_xor_tl tcg_gen_xor_i32
2687 #define tcg_gen_xori_tl tcg_gen_xori_i32
2688 #define tcg_gen_not_tl tcg_gen_not_i32
2689 #define tcg_gen_shl_tl tcg_gen_shl_i32
2690 #define tcg_gen_shli_tl tcg_gen_shli_i32
2691 #define tcg_gen_shr_tl tcg_gen_shr_i32
2692 #define tcg_gen_shri_tl tcg_gen_shri_i32
2693 #define tcg_gen_sar_tl tcg_gen_sar_i32
2694 #define tcg_gen_sari_tl tcg_gen_sari_i32
2695 #define tcg_gen_brcond_tl tcg_gen_brcond_i32
2696 #define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
2697 #define tcg_gen_setcond_tl tcg_gen_setcond_i32
2698 #define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
2699 #define tcg_gen_mul_tl tcg_gen_mul_i32
2700 #define tcg_gen_muli_tl tcg_gen_muli_i32
2701 #define tcg_gen_div_tl tcg_gen_div_i32
2702 #define tcg_gen_rem_tl tcg_gen_rem_i32
2703 #define tcg_gen_divu_tl tcg_gen_divu_i32
2704 #define tcg_gen_remu_tl tcg_gen_remu_i32
2705 #define tcg_gen_discard_tl tcg_gen_discard_i32
2706 #define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
2707 #define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
2708 #define tcg_gen_extu_i32_tl tcg_gen_mov_i32
2709 #define tcg_gen_ext_i32_tl tcg_gen_mov_i32
2710 #define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
2711 #define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
2712 #define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
2713 #define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
2714 #define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
2715 #define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
2716 #define tcg_gen_ext32u_tl tcg_gen_mov_i32
2717 #define tcg_gen_ext32s_tl tcg_gen_mov_i32
2718 #define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
2719 #define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
2720 #define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
2721 #define tcg_gen_extr_i64_tl tcg_gen_extr_i64_i32
2722 #define tcg_gen_andc_tl tcg_gen_andc_i32
2723 #define tcg_gen_eqv_tl tcg_gen_eqv_i32
2724 #define tcg_gen_nand_tl tcg_gen_nand_i32
2725 #define tcg_gen_nor_tl tcg_gen_nor_i32
2726 #define tcg_gen_orc_tl tcg_gen_orc_i32
2727 #define tcg_gen_rotl_tl tcg_gen_rotl_i32
2728 #define tcg_gen_rotli_tl tcg_gen_rotli_i32
2729 #define tcg_gen_rotr_tl tcg_gen_rotr_i32
2730 #define tcg_gen_rotri_tl tcg_gen_rotri_i32
2731 #define tcg_gen_deposit_tl tcg_gen_deposit_i32
2732 #define tcg_const_tl tcg_const_i32
2733 #define tcg_const_local_tl tcg_const_local_i32
2734 #define tcg_gen_movcond_tl tcg_gen_movcond_i32
2735 #define tcg_gen_add2_tl tcg_gen_add2_i32
2736 #define tcg_gen_sub2_tl tcg_gen_sub2_i32
2737 #define tcg_gen_mulu2_tl tcg_gen_mulu2_i32
2738 #define tcg_gen_muls2_tl tcg_gen_muls2_i32
2739 #endif
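
/* Sketch (hypothetical helper, not part of the original header): thanks to
 * the *_tl aliases above, target-long-sized front-end code such as this
 * zero-extension of the low byte compiles unchanged for both 32-bit and
 * 64-bit guests. */
static inline void gen_zero_extend_byte_example(TCGv reg)
{
    tcg_gen_ext8u_tl(reg, reg);
}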
2740
2741 #if UINTPTR_MAX == UINT32_MAX
2742 # define tcg_gen_ld_ptr(R, A, O) \
2743 tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
2744 # define tcg_gen_discard_ptr(A) \
2745 tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
2746 # define tcg_gen_add_ptr(R, A, B) \
2747 tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
2748 # define tcg_gen_addi_ptr(R, A, B) \
2749 tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
2750 # define tcg_gen_ext_i32_ptr(R, A) \
2751 tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
2752 #else
2753 # define tcg_gen_ld_ptr(R, A, O) \
2754 tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
2755 # define tcg_gen_discard_ptr(A) \
2756 tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
2757 # define tcg_gen_add_ptr(R, A, B) \
2758 tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
2759 # define tcg_gen_addi_ptr(R, A, B) \
2760 tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
2761 # define tcg_gen_ext_i32_ptr(R, A) \
2762 tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
2763 #endif /* UINTPTR_MAX == UINT32_MAX */