1 /*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008 Fabrice Bellard
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
23 */
24 #include "tcg.h"
25
26 int gen_new_label(void);
27
28 static inline void tcg_gen_op0(TCGOpcode opc)
29 {
30 *gen_opc_ptr++ = opc;
31 }
32
33 static inline void tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 arg1)
34 {
35 *gen_opc_ptr++ = opc;
36 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
37 }
38
39 static inline void tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 arg1)
40 {
41 *gen_opc_ptr++ = opc;
42 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
43 }
44
45 static inline void tcg_gen_op1i(TCGOpcode opc, TCGArg arg1)
46 {
47 *gen_opc_ptr++ = opc;
48 *gen_opparam_ptr++ = arg1;
49 }
50
51 static inline void tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2)
52 {
53 *gen_opc_ptr++ = opc;
54 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
55 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
56 }
57
58 static inline void tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2)
59 {
60 *gen_opc_ptr++ = opc;
61 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
62 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
63 }
64
65 static inline void tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGArg arg2)
66 {
67 *gen_opc_ptr++ = opc;
68 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
69 *gen_opparam_ptr++ = arg2;
70 }
71
72 static inline void tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGArg arg2)
73 {
74 *gen_opc_ptr++ = opc;
75 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
76 *gen_opparam_ptr++ = arg2;
77 }
78
79 static inline void tcg_gen_op2ii(TCGOpcode opc, TCGArg arg1, TCGArg arg2)
80 {
81 *gen_opc_ptr++ = opc;
82 *gen_opparam_ptr++ = arg1;
83 *gen_opparam_ptr++ = arg2;
84 }
85
86 static inline void tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
87 TCGv_i32 arg3)
88 {
89 *gen_opc_ptr++ = opc;
90 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
91 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
92 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
93 }
94
95 static inline void tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
96 TCGv_i64 arg3)
97 {
98 *gen_opc_ptr++ = opc;
99 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
100 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
101 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
102 }
103
104 static inline void tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 arg1,
105 TCGv_i32 arg2, TCGArg arg3)
106 {
107 *gen_opc_ptr++ = opc;
108 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
109 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
110 *gen_opparam_ptr++ = arg3;
111 }
112
113 static inline void tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 arg1,
114 TCGv_i64 arg2, TCGArg arg3)
115 {
116 *gen_opc_ptr++ = opc;
117 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
118 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
119 *gen_opparam_ptr++ = arg3;
120 }
121
122 static inline void tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
123 TCGv_ptr base, TCGArg offset)
124 {
125 *gen_opc_ptr++ = opc;
126 *gen_opparam_ptr++ = GET_TCGV_I32(val);
127 *gen_opparam_ptr++ = GET_TCGV_PTR(base);
128 *gen_opparam_ptr++ = offset;
129 }
130
131 static inline void tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
132 TCGv_ptr base, TCGArg offset)
133 {
134 *gen_opc_ptr++ = opc;
135 *gen_opparam_ptr++ = GET_TCGV_I64(val);
136 *gen_opparam_ptr++ = GET_TCGV_PTR(base);
137 *gen_opparam_ptr++ = offset;
138 }
139
140 static inline void tcg_gen_qemu_ldst_op_i64_i32(TCGOpcode opc, TCGv_i64 val,
141 TCGv_i32 addr, TCGArg mem_index)
142 {
143 *gen_opc_ptr++ = opc;
144 *gen_opparam_ptr++ = GET_TCGV_I64(val);
145 *gen_opparam_ptr++ = GET_TCGV_I32(addr);
146 *gen_opparam_ptr++ = mem_index;
147 }
148
149 static inline void tcg_gen_qemu_ldst_op_i64_i64(TCGOpcode opc, TCGv_i64 val,
150 TCGv_i64 addr, TCGArg mem_index)
151 {
152 *gen_opc_ptr++ = opc;
153 *gen_opparam_ptr++ = GET_TCGV_I64(val);
154 *gen_opparam_ptr++ = GET_TCGV_I64(addr);
155 *gen_opparam_ptr++ = mem_index;
156 }
157
158 static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
159 TCGv_i32 arg3, TCGv_i32 arg4)
160 {
161 *gen_opc_ptr++ = opc;
162 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
163 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
164 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
165 *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
166 }
167
168 static inline void tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
169 TCGv_i64 arg3, TCGv_i64 arg4)
170 {
171 *gen_opc_ptr++ = opc;
172 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
173 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
174 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
175 *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
176 }
177
178 static inline void tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
179 TCGv_i32 arg3, TCGArg arg4)
180 {
181 *gen_opc_ptr++ = opc;
182 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
183 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
184 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
185 *gen_opparam_ptr++ = arg4;
186 }
187
188 static inline void tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
189 TCGv_i64 arg3, TCGArg arg4)
190 {
191 *gen_opc_ptr++ = opc;
192 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
193 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
194 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
195 *gen_opparam_ptr++ = arg4;
196 }
197
198 static inline void tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
199 TCGArg arg3, TCGArg arg4)
200 {
201 *gen_opc_ptr++ = opc;
202 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
203 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
204 *gen_opparam_ptr++ = arg3;
205 *gen_opparam_ptr++ = arg4;
206 }
207
208 static inline void tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
209 TCGArg arg3, TCGArg arg4)
210 {
211 *gen_opc_ptr++ = opc;
212 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
213 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
214 *gen_opparam_ptr++ = arg3;
215 *gen_opparam_ptr++ = arg4;
216 }
217
218 static inline void tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
219 TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5)
220 {
221 *gen_opc_ptr++ = opc;
222 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
223 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
224 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
225 *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
226 *gen_opparam_ptr++ = GET_TCGV_I32(arg5);
227 }
228
229 static inline void tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
230 TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5)
231 {
232 *gen_opc_ptr++ = opc;
233 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
234 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
235 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
236 *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
237 *gen_opparam_ptr++ = GET_TCGV_I64(arg5);
238 }
239
240 static inline void tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
241 TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5)
242 {
243 *gen_opc_ptr++ = opc;
244 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
245 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
246 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
247 *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
248 *gen_opparam_ptr++ = arg5;
249 }
250
251 static inline void tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
252 TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5)
253 {
254 *gen_opc_ptr++ = opc;
255 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
256 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
257 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
258 *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
259 *gen_opparam_ptr++ = arg5;
260 }
261
262 static inline void tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 arg1,
263 TCGv_i32 arg2, TCGv_i32 arg3,
264 TCGArg arg4, TCGArg arg5)
265 {
266 *gen_opc_ptr++ = opc;
267 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
268 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
269 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
270 *gen_opparam_ptr++ = arg4;
271 *gen_opparam_ptr++ = arg5;
272 }
273
274 static inline void tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 arg1,
275 TCGv_i64 arg2, TCGv_i64 arg3,
276 TCGArg arg4, TCGArg arg5)
277 {
278 *gen_opc_ptr++ = opc;
279 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
280 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
281 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
282 *gen_opparam_ptr++ = arg4;
283 *gen_opparam_ptr++ = arg5;
284 }
285
286 static inline void tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
287 TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5,
288 TCGv_i32 arg6)
289 {
290 *gen_opc_ptr++ = opc;
291 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
292 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
293 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
294 *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
295 *gen_opparam_ptr++ = GET_TCGV_I32(arg5);
296 *gen_opparam_ptr++ = GET_TCGV_I32(arg6);
297 }
298
299 static inline void tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
300 TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5,
301 TCGv_i64 arg6)
302 {
303 *gen_opc_ptr++ = opc;
304 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
305 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
306 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
307 *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
308 *gen_opparam_ptr++ = GET_TCGV_I64(arg5);
309 *gen_opparam_ptr++ = GET_TCGV_I64(arg6);
310 }
311
312 static inline void tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
313 TCGv_i32 arg3, TCGv_i32 arg4,
314 TCGv_i32 arg5, TCGArg arg6)
315 {
316 *gen_opc_ptr++ = opc;
317 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
318 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
319 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
320 *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
321 *gen_opparam_ptr++ = GET_TCGV_I32(arg5);
322 *gen_opparam_ptr++ = arg6;
323 }
324
325 static inline void tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
326 TCGv_i64 arg3, TCGv_i64 arg4,
327 TCGv_i64 arg5, TCGArg arg6)
328 {
329 *gen_opc_ptr++ = opc;
330 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
331 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
332 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
333 *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
334 *gen_opparam_ptr++ = GET_TCGV_I64(arg5);
335 *gen_opparam_ptr++ = arg6;
336 }
337
338 static inline void tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 arg1,
339 TCGv_i32 arg2, TCGv_i32 arg3,
340 TCGv_i32 arg4, TCGArg arg5, TCGArg arg6)
341 {
342 *gen_opc_ptr++ = opc;
343 *gen_opparam_ptr++ = GET_TCGV_I32(arg1);
344 *gen_opparam_ptr++ = GET_TCGV_I32(arg2);
345 *gen_opparam_ptr++ = GET_TCGV_I32(arg3);
346 *gen_opparam_ptr++ = GET_TCGV_I32(arg4);
347 *gen_opparam_ptr++ = arg5;
348 *gen_opparam_ptr++ = arg6;
349 }
350
351 static inline void tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 arg1,
352 TCGv_i64 arg2, TCGv_i64 arg3,
353 TCGv_i64 arg4, TCGArg arg5, TCGArg arg6)
354 {
355 *gen_opc_ptr++ = opc;
356 *gen_opparam_ptr++ = GET_TCGV_I64(arg1);
357 *gen_opparam_ptr++ = GET_TCGV_I64(arg2);
358 *gen_opparam_ptr++ = GET_TCGV_I64(arg3);
359 *gen_opparam_ptr++ = GET_TCGV_I64(arg4);
360 *gen_opparam_ptr++ = arg5;
361 *gen_opparam_ptr++ = arg6;
362 }
363
364 static inline void gen_set_label(int n)
365 {
366 tcg_gen_op1i(INDEX_op_set_label, n);
367 }
368
369 static inline void tcg_gen_br(int label)
370 {
371 tcg_gen_op1i(INDEX_op_br, label);
372 }
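/* Typical label usage in a front end (illustrative sketch only; "val" is a
 * hypothetical TCGv_i32 temporary, and the conditional-branch helpers such as
 * tcg_gen_brcondi_i32() are defined further down in this header):
 *
 *     int over = gen_new_label();
 *     tcg_gen_brcondi_i32(TCG_COND_EQ, val, 0, over);
 *     ... ops generated only for the val != 0 path ...
 *     gen_set_label(over);
 */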
373
374 static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
375 {
376 if (!TCGV_EQUAL_I32(ret, arg))
377 tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
378 }
379
380 static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
381 {
382 tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg);
383 }
384
385 /* A version of dh_sizemask from def-helper.h that doesn't rely on
386 preprocessor magic. */
387 static inline int tcg_gen_sizemask(int n, int is_64bit, int is_signed)
388 {
389 return (is_64bit << n*2) | (is_signed << (n*2 + 1));
390 }
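/* Worked example (illustrative, not part of the original header): for a
 * helper that returns a signed 32-bit value and takes two signed 32-bit
 * arguments, the mask is assembled as
 *     tcg_gen_sizemask(0, 0, 1) -> 1 << 1 = 0x02   (return value)
 *     tcg_gen_sizemask(1, 0, 1) -> 1 << 3 = 0x08   (argument 1)
 *     tcg_gen_sizemask(2, 0, 1) -> 1 << 5 = 0x20   (argument 2)
 * so sizemask == 0x2a, exactly the value built in tcg_gen_div_i32() below. */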
391
392 /* helper calls */
393 static inline void tcg_gen_helperN(void *func, int flags, int sizemask,
394 TCGArg ret, int nargs, TCGArg *args)
395 {
396 TCGv_ptr fn;
397 fn = tcg_const_ptr(func);
398 tcg_gen_callN(&tcg_ctx, fn, flags, sizemask, ret,
399 nargs, args);
400 tcg_temp_free_ptr(fn);
401 }
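/* Illustrative sketch of calling a target helper through tcg_gen_helperN().
 * "helper_foo" is a hypothetical one-argument 32-bit helper assumed to be
 * defined elsewhere; it is not part of this header. */
uint32_t helper_foo(uint32_t arg);   /* hypothetical, for illustration only */

static inline void gen_helper_foo_example(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Return value and the single argument are 32-bit and unsigned. */
    int sizemask = tcg_gen_sizemask(0, 0, 0) | tcg_gen_sizemask(1, 0, 0);
    TCGArg args[1] = { GET_TCGV_I32(arg) };

    tcg_gen_helperN(helper_foo, 0, sizemask, GET_TCGV_I32(ret), 1, args);
}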
402
403 /* Note: Both tcg_gen_helper32() and tcg_gen_helper64() are currently
404 reserved for helpers in tcg-runtime.c. These helpers are all const
405 and pure, hence the call to tcg_gen_callN() with TCG_CALL_CONST |
406 TCG_CALL_PURE. This may need to be adjusted if these functions
407 start to be used with other helpers. */
408 static inline void tcg_gen_helper32(void *func, int sizemask, TCGv_i32 ret,
409 TCGv_i32 a, TCGv_i32 b)
410 {
411 TCGv_ptr fn;
412 TCGArg args[2];
413 fn = tcg_const_ptr(func);
414 args[0] = GET_TCGV_I32(a);
415 args[1] = GET_TCGV_I32(b);
416 tcg_gen_callN(&tcg_ctx, fn, TCG_CALL_CONST | TCG_CALL_PURE, sizemask,
417 GET_TCGV_I32(ret), 2, args);
418 tcg_temp_free_ptr(fn);
419 }
420
421 static inline void tcg_gen_helper64(void *func, int sizemask, TCGv_i64 ret,
422 TCGv_i64 a, TCGv_i64 b)
423 {
424 TCGv_ptr fn;
425 TCGArg args[2];
426 fn = tcg_const_ptr(func);
427 args[0] = GET_TCGV_I64(a);
428 args[1] = GET_TCGV_I64(b);
429 tcg_gen_callN(&tcg_ctx, fn, TCG_CALL_CONST | TCG_CALL_PURE, sizemask,
430 GET_TCGV_I64(ret), 2, args);
431 tcg_temp_free_ptr(fn);
432 }
433
434 /* 32 bit ops */
435
436 static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
437 {
438 tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
439 }
440
441 static inline void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
442 {
443 tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
444 }
445
446 static inline void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
447 {
448 tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
449 }
450
451 static inline void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
452 {
453 tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
454 }
455
456 static inline void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
457 {
458 tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
459 }
460
461 static inline void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
462 {
463 tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
464 }
465
466 static inline void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
467 {
468 tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
469 }
470
471 static inline void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
472 {
473 tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
474 }
475
476 static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
477 {
478 tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
479 }
480
481 static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
482 {
483 /* some cases can be optimized here */
484 if (arg2 == 0) {
485 tcg_gen_mov_i32(ret, arg1);
486 } else {
487 TCGv_i32 t0 = tcg_const_i32(arg2);
488 tcg_gen_add_i32(ret, arg1, t0);
489 tcg_temp_free_i32(t0);
490 }
491 }
492
493 static inline void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
494 {
495 tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
496 }
497
498 static inline void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
499 {
500 TCGv_i32 t0 = tcg_const_i32(arg1);
501 tcg_gen_sub_i32(ret, t0, arg2);
502 tcg_temp_free_i32(t0);
503 }
504
505 static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
506 {
507 /* some cases can be optimized here */
508 if (arg2 == 0) {
509 tcg_gen_mov_i32(ret, arg1);
510 } else {
511 TCGv_i32 t0 = tcg_const_i32(arg2);
512 tcg_gen_sub_i32(ret, arg1, t0);
513 tcg_temp_free_i32(t0);
514 }
515 }
516
517 static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
518 {
519 if (TCGV_EQUAL_I32(arg1, arg2)) {
520 tcg_gen_mov_i32(ret, arg1);
521 } else {
522 tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
523 }
524 }
525
526 static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
527 {
528 TCGv_i32 t0;
529 /* Some cases can be optimized here. */
530 switch (arg2) {
531 case 0:
532 tcg_gen_movi_i32(ret, 0);
533 return;
534 case 0xffffffffu:
535 tcg_gen_mov_i32(ret, arg1);
536 return;
537 case 0xffu:
538 /* Don't recurse with tcg_gen_ext8u_i32. */
539 if (TCG_TARGET_HAS_ext8u_i32) {
540 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
541 return;
542 }
543 break;
544 case 0xffffu:
545 if (TCG_TARGET_HAS_ext16u_i32) {
546 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
547 return;
548 }
549 break;
550 }
551 t0 = tcg_const_i32(arg2);
552 tcg_gen_and_i32(ret, arg1, t0);
553 tcg_temp_free_i32(t0);
554 }
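/* For example, tcg_gen_andi_i32(ret, arg, 0xff) above emits a single
 * ext8u_i32 op when the backend advertises TCG_TARGET_HAS_ext8u_i32, and only
 * falls back to materializing the constant and emitting and_i32 otherwise;
 * masks of 0 and 0xffffffff never generate an "and" at all. */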
555
556 static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
557 {
558 if (TCGV_EQUAL_I32(arg1, arg2)) {
559 tcg_gen_mov_i32(ret, arg1);
560 } else {
561 tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
562 }
563 }
564
565 static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
566 {
567 /* Some cases can be optimized here. */
568 if (arg2 == -1) {
569 tcg_gen_movi_i32(ret, -1);
570 } else if (arg2 == 0) {
571 tcg_gen_mov_i32(ret, arg1);
572 } else {
573 TCGv_i32 t0 = tcg_const_i32(arg2);
574 tcg_gen_or_i32(ret, arg1, t0);
575 tcg_temp_free_i32(t0);
576 }
577 }
578
579 static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
580 {
581 if (TCGV_EQUAL_I32(arg1, arg2)) {
582 tcg_gen_movi_i32(ret, 0);
583 } else {
584 tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
585 }
586 }
587
588 static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
589 {
590 /* Some cases can be optimized here. */
591 if (arg2 == 0) {
592 tcg_gen_mov_i32(ret, arg1);
593 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
594 /* Don't recurse with tcg_gen_not_i32. */
595 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
596 } else {
597 TCGv_i32 t0 = tcg_const_i32(arg2);
598 tcg_gen_xor_i32(ret, arg1, t0);
599 tcg_temp_free_i32(t0);
600 }
601 }
602
603 static inline void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
604 {
605 tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
606 }
607
608 static inline void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
609 {
610 if (arg2 == 0) {
611 tcg_gen_mov_i32(ret, arg1);
612 } else {
613 TCGv_i32 t0 = tcg_const_i32(arg2);
614 tcg_gen_shl_i32(ret, arg1, t0);
615 tcg_temp_free_i32(t0);
616 }
617 }
618
619 static inline void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
620 {
621 tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
622 }
623
624 static inline void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
625 {
626 if (arg2 == 0) {
627 tcg_gen_mov_i32(ret, arg1);
628 } else {
629 TCGv_i32 t0 = tcg_const_i32(arg2);
630 tcg_gen_shr_i32(ret, arg1, t0);
631 tcg_temp_free_i32(t0);
632 }
633 }
634
635 static inline void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
636 {
637 tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
638 }
639
640 static inline void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
641 {
642 if (arg2 == 0) {
643 tcg_gen_mov_i32(ret, arg1);
644 } else {
645 TCGv_i32 t0 = tcg_const_i32(arg2);
646 tcg_gen_sar_i32(ret, arg1, t0);
647 tcg_temp_free_i32(t0);
648 }
649 }
650
651 static inline void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1,
652 TCGv_i32 arg2, int label_index)
653 {
654 if (cond == TCG_COND_ALWAYS) {
655 tcg_gen_br(label_index);
656 } else if (cond != TCG_COND_NEVER) {
657 tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
658 }
659 }
660
661 static inline void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1,
662 int32_t arg2, int label_index)
663 {
664 if (cond == TCG_COND_ALWAYS) {
665 tcg_gen_br(label_index);
666 } else if (cond != TCG_COND_NEVER) {
667 TCGv_i32 t0 = tcg_const_i32(arg2);
668 tcg_gen_brcond_i32(cond, arg1, t0, label_index);
669 tcg_temp_free_i32(t0);
670 }
671 }
672
673 static inline void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
674 TCGv_i32 arg1, TCGv_i32 arg2)
675 {
676 if (cond == TCG_COND_ALWAYS) {
677 tcg_gen_movi_i32(ret, 1);
678 } else if (cond == TCG_COND_NEVER) {
679 tcg_gen_movi_i32(ret, 0);
680 } else {
681 tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
682 }
683 }
684
685 static inline void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
686 TCGv_i32 arg1, int32_t arg2)
687 {
688 if (cond == TCG_COND_ALWAYS) {
689 tcg_gen_movi_i32(ret, 1);
690 } else if (cond == TCG_COND_NEVER) {
691 tcg_gen_movi_i32(ret, 0);
692 } else {
693 TCGv_i32 t0 = tcg_const_i32(arg2);
694 tcg_gen_setcond_i32(cond, ret, arg1, t0);
695 tcg_temp_free_i32(t0);
696 }
697 }
698
699 static inline void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
700 {
701 tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
702 }
703
704 static inline void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
705 {
706 TCGv_i32 t0 = tcg_const_i32(arg2);
707 tcg_gen_mul_i32(ret, arg1, t0);
708 tcg_temp_free_i32(t0);
709 }
710
711 static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
712 {
713 if (TCG_TARGET_HAS_div_i32) {
714 tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
715 } else if (TCG_TARGET_HAS_div2_i32) {
716 TCGv_i32 t0 = tcg_temp_new_i32();
717 tcg_gen_sari_i32(t0, arg1, 31);
718 tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
719 tcg_temp_free_i32(t0);
720 } else {
721 int sizemask = 0;
722 /* Return value and both arguments are 32-bit and signed. */
723 sizemask |= tcg_gen_sizemask(0, 0, 1);
724 sizemask |= tcg_gen_sizemask(1, 0, 1);
725 sizemask |= tcg_gen_sizemask(2, 0, 1);
726 tcg_gen_helper32(tcg_helper_div_i32, sizemask, ret, arg1, arg2);
727 }
728 }
729
730 static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
731 {
732 if (TCG_TARGET_HAS_div_i32) {
733 tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
734 } else if (TCG_TARGET_HAS_div2_i32) {
735 TCGv_i32 t0 = tcg_temp_new_i32();
736 tcg_gen_sari_i32(t0, arg1, 31);
737 tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
738 tcg_temp_free_i32(t0);
739 } else {
740 int sizemask = 0;
741 /* Return value and both arguments are 32-bit and signed. */
742 sizemask |= tcg_gen_sizemask(0, 0, 1);
743 sizemask |= tcg_gen_sizemask(1, 0, 1);
744 sizemask |= tcg_gen_sizemask(2, 0, 1);
745 tcg_gen_helper32(tcg_helper_rem_i32, sizemask, ret, arg1, arg2);
746 }
747 }
748
749 static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
750 {
751 if (TCG_TARGET_HAS_div_i32) {
752 tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
753 } else if (TCG_TARGET_HAS_div2_i32) {
754 TCGv_i32 t0 = tcg_temp_new_i32();
755 tcg_gen_movi_i32(t0, 0);
756 tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
757 tcg_temp_free_i32(t0);
758 } else {
759 int sizemask = 0;
760 /* Return value and both arguments are 32-bit and unsigned. */
761 sizemask |= tcg_gen_sizemask(0, 0, 0);
762 sizemask |= tcg_gen_sizemask(1, 0, 0);
763 sizemask |= tcg_gen_sizemask(2, 0, 0);
764 tcg_gen_helper32(tcg_helper_divu_i32, sizemask, ret, arg1, arg2);
765 }
766 }
767
768 static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
769 {
770 if (TCG_TARGET_HAS_div_i32) {
771 tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
772 } else if (TCG_TARGET_HAS_div2_i32) {
773 TCGv_i32 t0 = tcg_temp_new_i32();
774 tcg_gen_movi_i32(t0, 0);
775 tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
776 tcg_temp_free_i32(t0);
777 } else {
778 int sizemask = 0;
779 /* Return value and both arguments are 32-bit and unsigned. */
780 sizemask |= tcg_gen_sizemask(0, 0, 0);
781 sizemask |= tcg_gen_sizemask(1, 0, 0);
782 sizemask |= tcg_gen_sizemask(2, 0, 0);
783 tcg_gen_helper32(tcg_helper_remu_i32, sizemask, ret, arg1, arg2);
784 }
785 }
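/* Note on the div2/divu2 fallback used in the four functions above
 * (descriptive comment, inferred from the operand setup): the double-width
 * opcodes take the dividend split into (low, high) plus the divisor, and
 * produce the quotient in their first output and the remainder in their
 * second, mirroring a two-register dividend such as x86's edx:eax.  The high
 * word is therefore seeded with the sign extension of arg1 for signed
 * division and with zero for unsigned division; the rem variants simply keep
 * the second output instead of the first. */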
786
787 #if TCG_TARGET_REG_BITS == 32
788
789 static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
790 {
791 if (!TCGV_EQUAL_I64(ret, arg)) {
792 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
793 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
794 }
795 }
796
797 static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
798 {
799 tcg_gen_movi_i32(TCGV_LOW(ret), arg);
800 tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
801 }
802
803 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
804 tcg_target_long offset)
805 {
806 tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
807 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
808 }
809
810 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
811 tcg_target_long offset)
812 {
813 tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
814 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
815 }
816
817 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
818 tcg_target_long offset)
819 {
820 tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
821 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
822 }
823
824 static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
825 tcg_target_long offset)
826 {
827 tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
828 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
829 }
830
831 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
832 tcg_target_long offset)
833 {
834 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
835 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
836 }
837
838 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
839 tcg_target_long offset)
840 {
841 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
842 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
843 }
844
845 static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
846 tcg_target_long offset)
847 {
848 /* since arg2 and ret have different types, they cannot be the
849 same temporary */
850 #ifdef TCG_TARGET_WORDS_BIGENDIAN
851 tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
852 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
853 #else
854 tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
855 tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
856 #endif
857 }
858
859 static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
860 tcg_target_long offset)
861 {
862 tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
863 }
864
865 static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
866 tcg_target_long offset)
867 {
868 tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
869 }
870
871 static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
872 tcg_target_long offset)
873 {
874 tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
875 }
876
877 static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
878 tcg_target_long offset)
879 {
880 #ifdef TCG_TARGET_WORDS_BIGENDIAN
881 tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
882 tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
883 #else
884 tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
885 tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
886 #endif
887 }
888
889 static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
890 {
891 tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
892 TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
893 TCGV_HIGH(arg2));
894 /* Allow the optimizer room to replace add2 with two moves. */
895 tcg_gen_op0(INDEX_op_nop);
896 }
897
898 static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
899 {
900 tcg_gen_op6_i32(INDEX_op_sub2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
901 TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
902 TCGV_HIGH(arg2));
903 /* Allow the optimizer room to replace sub2 with two moves. */
904 tcg_gen_op0(INDEX_op_nop);
905 }
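/* Note on the trailing nops above (descriptive comment): add2_i32 and
 * sub2_i32 are single ops producing two outputs, so when the optimizer can
 * constant fold the double-word operation it has to rewrite one op as two
 * (e.g. a movi for each half).  The nop emitted right after the op reserves
 * that second slot in the opcode stream, letting the rewrite happen in place
 * without shifting the rest of the ops. */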
906
907 static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
908 {
909 tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
910 tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
911 }
912
913 static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
914 {
915 tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
916 tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
917 }
918
919 static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
920 {
921 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
922 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
923 }
924
925 static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
926 {
927 tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
928 tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
929 }
930
931 static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
932 {
933 tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
934 tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
935 }
936
937 static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
938 {
939 tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
940 tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
941 }
942
943 /* XXX: use generic code when basic block handling is OK or CPU
944 specific code (x86) */
945 static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
946 {
947 int sizemask = 0;
948 /* Return value and both arguments are 64-bit and signed. */
949 sizemask |= tcg_gen_sizemask(0, 1, 1);
950 sizemask |= tcg_gen_sizemask(1, 1, 1);
951 sizemask |= tcg_gen_sizemask(2, 1, 1);
952
953 tcg_gen_helper64(tcg_helper_shl_i64, sizemask, ret, arg1, arg2);
954 }
955
956 static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
957 {
958 tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
959 }
960
961 static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
962 {
963 int sizemask = 0;
964 /* Return value and both arguments are 64-bit and signed. */
965 sizemask |= tcg_gen_sizemask(0, 1, 1);
966 sizemask |= tcg_gen_sizemask(1, 1, 1);
967 sizemask |= tcg_gen_sizemask(2, 1, 1);
968
969 tcg_gen_helper64(tcg_helper_shr_i64, sizemask, ret, arg1, arg2);
970 }
971
972 static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
973 {
974 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
975 }
976
977 static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
978 {
979 int sizemask = 0;
980 /* Return value and both arguments are 64-bit and signed. */
981 sizemask |= tcg_gen_sizemask(0, 1, 1);
982 sizemask |= tcg_gen_sizemask(1, 1, 1);
983 sizemask |= tcg_gen_sizemask(2, 1, 1);
984
985 tcg_gen_helper64(tcg_helper_sar_i64, sizemask, ret, arg1, arg2);
986 }
987
988 static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
989 {
990 tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
991 }
992
993 static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
994 TCGv_i64 arg2, int label_index)
995 {
996 if (cond == TCG_COND_ALWAYS) {
997 tcg_gen_br(label_index);
998 } else if (cond != TCG_COND_NEVER) {
999 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
1000 TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
1001 TCGV_HIGH(arg2), cond, label_index);
1002 }
1003 }
1004
1005 static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
1006 TCGv_i64 arg1, TCGv_i64 arg2)
1007 {
1008 if (cond == TCG_COND_ALWAYS) {
1009 tcg_gen_movi_i32(TCGV_LOW(ret), 1);
1010 } else if (cond == TCG_COND_NEVER) {
1011 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
1012 } else {
1013 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
1014 TCGV_LOW(arg1), TCGV_HIGH(arg1),
1015 TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
1016 }
1017 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1018 }
1019
1020 static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1021 {
1022 TCGv_i64 t0;
1023 TCGv_i32 t1;
1024
1025 t0 = tcg_temp_new_i64();
1026 t1 = tcg_temp_new_i32();
1027
1028 tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
1029 TCGV_LOW(arg1), TCGV_LOW(arg2));
1030
1031 tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
1032 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
1033 tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
1034 tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
1035
1036 tcg_gen_mov_i64(ret, t0);
1037 tcg_temp_free_i64(t0);
1038 tcg_temp_free_i32(t1);
1039 }
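/* Why the 32-bit decomposition above is sufficient (illustrative note):
 * writing arg1 = aH * 2^32 + aL and arg2 = bH * 2^32 + bL, the low 64 bits
 * of the product are
 *     aL*bL + ((aL*bH + aH*bL) << 32)      (mod 2^64)
 * because the aH*bH term only contributes to bits 64 and above.  mulu2_i32
 * yields the full 64-bit aL*bL, and the two 32-bit cross products are then
 * added into its high half. */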
1040
1041 static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1042 {
1043 int sizemask = 0;
1044 /* Return value and both arguments are 64-bit and signed. */
1045 sizemask |= tcg_gen_sizemask(0, 1, 1);
1046 sizemask |= tcg_gen_sizemask(1, 1, 1);
1047 sizemask |= tcg_gen_sizemask(2, 1, 1);
1048
1049 tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
1050 }
1051
1052 static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1053 {
1054 int sizemask = 0;
1055 /* Return value and both arguments are 64-bit and signed. */
1056 sizemask |= tcg_gen_sizemask(0, 1, 1);
1057 sizemask |= tcg_gen_sizemask(1, 1, 1);
1058 sizemask |= tcg_gen_sizemask(2, 1, 1);
1059
1060 tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
1061 }
1062
1063 static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1064 {
1065 int sizemask = 0;
1066 /* Return value and both arguments are 64-bit and unsigned. */
1067 sizemask |= tcg_gen_sizemask(0, 1, 0);
1068 sizemask |= tcg_gen_sizemask(1, 1, 0);
1069 sizemask |= tcg_gen_sizemask(2, 1, 0);
1070
1071 tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
1072 }
1073
1074 static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1075 {
1076 int sizemask = 0;
1077 /* Return value and both arguments are 64-bit and unsigned. */
1078 sizemask |= tcg_gen_sizemask(0, 1, 0);
1079 sizemask |= tcg_gen_sizemask(1, 1, 0);
1080 sizemask |= tcg_gen_sizemask(2, 1, 0);
1081
1082 tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
1083 }
1084
1085 #else
1086
1087 static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
1088 {
1089 if (!TCGV_EQUAL_I64(ret, arg))
1090 tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
1091 }
1092
1093 static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
1094 {
1095 tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg);
1096 }
1097
1098 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1099 tcg_target_long offset)
1100 {
1101 tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
1102 }
1103
1104 static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1105 tcg_target_long offset)
1106 {
1107 tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
1108 }
1109
1110 static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1111 tcg_target_long offset)
1112 {
1113 tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
1114 }
1115
1116 static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1117 tcg_target_long offset)
1118 {
1119 tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
1120 }
1121
1122 static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
1123 tcg_target_long offset)
1124 {
1125 tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
1126 }
1127
1128 static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
1129 tcg_target_long offset)
1130 {
1131 tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
1132 }
1133
1134 static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
1135 {
1136 tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
1137 }
1138
1139 static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1140 tcg_target_long offset)
1141 {
1142 tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
1143 }
1144
1145 static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1146 tcg_target_long offset)
1147 {
1148 tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
1149 }
1150
1151 static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
1152 tcg_target_long offset)
1153 {
1154 tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
1155 }
1156
1157 static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
1158 {
1159 tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
1160 }
1161
1162 static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1163 {
1164 tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
1165 }
1166
1167 static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1168 {
1169 tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
1170 }
1171
1172 static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1173 {
1174 if (TCGV_EQUAL_I64(arg1, arg2)) {
1175 tcg_gen_mov_i64(ret, arg1);
1176 } else {
1177 tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
1178 }
1179 }
1180
1181 static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1182 {
1183 TCGv_i64 t0;
1184 /* Some cases can be optimized here. */
1185 switch (arg2) {
1186 case 0:
1187 tcg_gen_movi_i64(ret, 0);
1188 return;
1189 case 0xffffffffffffffffull:
1190 tcg_gen_mov_i64(ret, arg1);
1191 return;
1192 case 0xffull:
1193 /* Don't recurse with tcg_gen_ext8u_i64. */
1194 if (TCG_TARGET_HAS_ext8u_i64) {
1195 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
1196 return;
1197 }
1198 break;
1199 case 0xffffu:
1200 if (TCG_TARGET_HAS_ext16u_i64) {
1201 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
1202 return;
1203 }
1204 break;
1205 case 0xffffffffull:
1206 if (TCG_TARGET_HAS_ext32u_i64) {
1207 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
1208 return;
1209 }
1210 break;
1211 }
1212 t0 = tcg_const_i64(arg2);
1213 tcg_gen_and_i64(ret, arg1, t0);
1214 tcg_temp_free_i64(t0);
1215 }
1216
1217 static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1218 {
1219 if (TCGV_EQUAL_I64(arg1, arg2)) {
1220 tcg_gen_mov_i64(ret, arg1);
1221 } else {
1222 tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
1223 }
1224 }
1225
1226 static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1227 {
1228 /* Some cases can be optimized here. */
1229 if (arg2 == -1) {
1230 tcg_gen_movi_i64(ret, -1);
1231 } else if (arg2 == 0) {
1232 tcg_gen_mov_i64(ret, arg1);
1233 } else {
1234 TCGv_i64 t0 = tcg_const_i64(arg2);
1235 tcg_gen_or_i64(ret, arg1, t0);
1236 tcg_temp_free_i64(t0);
1237 }
1238 }
1239
1240 static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1241 {
1242 if (TCGV_EQUAL_I64(arg1, arg2)) {
1243 tcg_gen_movi_i64(ret, 0);
1244 } else {
1245 tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
1246 }
1247 }
1248
1249 static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1250 {
1251 /* Some cases can be optimized here. */
1252 if (arg2 == 0) {
1253 tcg_gen_mov_i64(ret, arg1);
1254 } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
1255 /* Don't recurse with tcg_gen_not_i64. */
1256 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
1257 } else {
1258 TCGv_i64 t0 = tcg_const_i64(arg2);
1259 tcg_gen_xor_i64(ret, arg1, t0);
1260 tcg_temp_free_i64(t0);
1261 }
1262 }
1263
1264 static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1265 {
1266 tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
1267 }
1268
1269 static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1270 {
1271 if (arg2 == 0) {
1272 tcg_gen_mov_i64(ret, arg1);
1273 } else {
1274 TCGv_i64 t0 = tcg_const_i64(arg2);
1275 tcg_gen_shl_i64(ret, arg1, t0);
1276 tcg_temp_free_i64(t0);
1277 }
1278 }
1279
1280 static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1281 {
1282 tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
1283 }
1284
1285 static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1286 {
1287 if (arg2 == 0) {
1288 tcg_gen_mov_i64(ret, arg1);
1289 } else {
1290 TCGv_i64 t0 = tcg_const_i64(arg2);
1291 tcg_gen_shr_i64(ret, arg1, t0);
1292 tcg_temp_free_i64(t0);
1293 }
1294 }
1295
1296 static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1297 {
1298 tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
1299 }
1300
1301 static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1302 {
1303 if (arg2 == 0) {
1304 tcg_gen_mov_i64(ret, arg1);
1305 } else {
1306 TCGv_i64 t0 = tcg_const_i64(arg2);
1307 tcg_gen_sar_i64(ret, arg1, t0);
1308 tcg_temp_free_i64(t0);
1309 }
1310 }
1311
1312 static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
1313 TCGv_i64 arg2, int label_index)
1314 {
1315 if (cond == TCG_COND_ALWAYS) {
1316 tcg_gen_br(label_index);
1317 } else if (cond != TCG_COND_NEVER) {
1318 tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label_index);
1319 }
1320 }
1321
1322 static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
1323 TCGv_i64 arg1, TCGv_i64 arg2)
1324 {
1325 if (cond == TCG_COND_ALWAYS) {
1326 tcg_gen_movi_i64(ret, 1);
1327 } else if (cond == TCG_COND_NEVER) {
1328 tcg_gen_movi_i64(ret, 0);
1329 } else {
1330 tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
1331 }
1332 }
1333
1334 static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1335 {
1336 tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
1337 }
1338
1339 static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1340 {
1341 if (TCG_TARGET_HAS_div_i64) {
1342 tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
1343 } else if (TCG_TARGET_HAS_div2_i64) {
1344 TCGv_i64 t0 = tcg_temp_new_i64();
1345 tcg_gen_sari_i64(t0, arg1, 63);
1346 tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
1347 tcg_temp_free_i64(t0);
1348 } else {
1349 int sizemask = 0;
1350 /* Return value and both arguments are 64-bit and signed. */
1351 sizemask |= tcg_gen_sizemask(0, 1, 1);
1352 sizemask |= tcg_gen_sizemask(1, 1, 1);
1353 sizemask |= tcg_gen_sizemask(2, 1, 1);
1354 tcg_gen_helper64(tcg_helper_div_i64, sizemask, ret, arg1, arg2);
1355 }
1356 }
1357
1358 static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1359 {
1360 if (TCG_TARGET_HAS_div_i64) {
1361 tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
1362 } else if (TCG_TARGET_HAS_div2_i64) {
1363 TCGv_i64 t0 = tcg_temp_new_i64();
1364 tcg_gen_sari_i64(t0, arg1, 63);
1365 tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
1366 tcg_temp_free_i64(t0);
1367 } else {
1368 int sizemask = 0;
1369 /* Return value and both arguments are 64-bit and signed. */
1370 sizemask |= tcg_gen_sizemask(0, 1, 1);
1371 sizemask |= tcg_gen_sizemask(1, 1, 1);
1372 sizemask |= tcg_gen_sizemask(2, 1, 1);
1373 tcg_gen_helper64(tcg_helper_rem_i64, sizemask, ret, arg1, arg2);
1374 }
1375 }
1376
1377 static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1378 {
1379 if (TCG_TARGET_HAS_div_i64) {
1380 tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
1381 } else if (TCG_TARGET_HAS_div2_i64) {
1382 TCGv_i64 t0 = tcg_temp_new_i64();
1383 tcg_gen_movi_i64(t0, 0);
1384 tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
1385 tcg_temp_free_i64(t0);
1386 } else {
1387 int sizemask = 0;
1388 /* Return value and both arguments are 64-bit and unsigned. */
1389 sizemask |= tcg_gen_sizemask(0, 1, 0);
1390 sizemask |= tcg_gen_sizemask(1, 1, 0);
1391 sizemask |= tcg_gen_sizemask(2, 1, 0);
1392 tcg_gen_helper64(tcg_helper_divu_i64, sizemask, ret, arg1, arg2);
1393 }
1394 }
1395
1396 static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1397 {
1398 if (TCG_TARGET_HAS_div_i64) {
1399 tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
1400 } else if (TCG_TARGET_HAS_div2_i64) {
1401 TCGv_i64 t0 = tcg_temp_new_i64();
1402 tcg_gen_movi_i64(t0, 0);
1403 tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
1404 tcg_temp_free_i64(t0);
1405 } else {
1406 int sizemask = 0;
1407 /* Return value and both arguments are 64-bit and unsigned. */
1408 sizemask |= tcg_gen_sizemask(0, 1, 0);
1409 sizemask |= tcg_gen_sizemask(1, 1, 0);
1410 sizemask |= tcg_gen_sizemask(2, 1, 0);
1411 tcg_gen_helper64(tcg_helper_remu_i64, sizemask, ret, arg1, arg2);
1412 }
1413 }
1414 #endif /* TCG_TARGET_REG_BITS == 32 */
1415
1416 static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1417 {
1418 /* some cases can be optimized here */
1419 if (arg2 == 0) {
1420 tcg_gen_mov_i64(ret, arg1);
1421 } else {
1422 TCGv_i64 t0 = tcg_const_i64(arg2);
1423 tcg_gen_add_i64(ret, arg1, t0);
1424 tcg_temp_free_i64(t0);
1425 }
1426 }
1427
1428 static inline void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
1429 {
1430 TCGv_i64 t0 = tcg_const_i64(arg1);
1431 tcg_gen_sub_i64(ret, t0, arg2);
1432 tcg_temp_free_i64(t0);
1433 }
1434
1435 static inline void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1436 {
1437 /* some cases can be optimized here */
1438 if (arg2 == 0) {
1439 tcg_gen_mov_i64(ret, arg1);
1440 } else {
1441 TCGv_i64 t0 = tcg_const_i64(arg2);
1442 tcg_gen_sub_i64(ret, arg1, t0);
1443 tcg_temp_free_i64(t0);
1444 }
1445 }
1446 static inline void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1,
1447 int64_t arg2, int label_index)
1448 {
1449 if (cond == TCG_COND_ALWAYS) {
1450 tcg_gen_br(label_index);
1451 } else if (cond != TCG_COND_NEVER) {
1452 TCGv_i64 t0 = tcg_const_i64(arg2);
1453 tcg_gen_brcond_i64(cond, arg1, t0, label_index);
1454 tcg_temp_free_i64(t0);
1455 }
1456 }
1457
1458 static inline void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1459 TCGv_i64 arg1, int64_t arg2)
1460 {
1461 TCGv_i64 t0 = tcg_const_i64(arg2);
1462 tcg_gen_setcond_i64(cond, ret, arg1, t0);
1463 tcg_temp_free_i64(t0);
1464 }
1465
1466 static inline void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1467 {
1468 TCGv_i64 t0 = tcg_const_i64(arg2);
1469 tcg_gen_mul_i64(ret, arg1, t0);
1470 tcg_temp_free_i64(t0);
1471 }
1472
1473
1474 /***************************************/
1475 /* optional operations */
1476
1477 static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
1478 {
1479 if (TCG_TARGET_HAS_ext8s_i32) {
1480 tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
1481 } else {
1482 tcg_gen_shli_i32(ret, arg, 24);
1483 tcg_gen_sari_i32(ret, ret, 24);
1484 }
1485 }
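/* Worked example of the shift-pair fallback above: for arg = 0x00000080,
 * shifting left by 24 gives 0x80000000 and the arithmetic shift right by 24
 * yields 0xffffff80, i.e. the byte sign-extended to -128. */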
1486
1487 static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
1488 {
1489 if (TCG_TARGET_HAS_ext16s_i32) {
1490 tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
1491 } else {
1492 tcg_gen_shli_i32(ret, arg, 16);
1493 tcg_gen_sari_i32(ret, ret, 16);
1494 }
1495 }
1496
1497 static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
1498 {
1499 if (TCG_TARGET_HAS_ext8u_i32) {
1500 tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
1501 } else {
1502 tcg_gen_andi_i32(ret, arg, 0xffu);
1503 }
1504 }
1505
1506 static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
1507 {
1508 if (TCG_TARGET_HAS_ext16u_i32) {
1509 tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
1510 } else {
1511 tcg_gen_andi_i32(ret, arg, 0xffffu);
1512 }
1513 }
1514
1515 /* Note: we assume the two high bytes are set to zero */
1516 static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
1517 {
1518 if (TCG_TARGET_HAS_bswap16_i32) {
1519 tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
1520 } else {
1521 TCGv_i32 t0 = tcg_temp_new_i32();
1522
1523 tcg_gen_ext8u_i32(t0, arg);
1524 tcg_gen_shli_i32(t0, t0, 8);
1525 tcg_gen_shri_i32(ret, arg, 8);
1526 tcg_gen_or_i32(ret, ret, t0);
1527 tcg_temp_free_i32(t0);
1528 }
1529 }
1530
1531 static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
1532 {
1533 if (TCG_TARGET_HAS_bswap32_i32) {
1534 tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
1535 } else {
1536 TCGv_i32 t0, t1;
1537 t0 = tcg_temp_new_i32();
1538 t1 = tcg_temp_new_i32();
1539
1540 tcg_gen_shli_i32(t0, arg, 24);
1541
1542 tcg_gen_andi_i32(t1, arg, 0x0000ff00);
1543 tcg_gen_shli_i32(t1, t1, 8);
1544 tcg_gen_or_i32(t0, t0, t1);
1545
1546 tcg_gen_shri_i32(t1, arg, 8);
1547 tcg_gen_andi_i32(t1, t1, 0x0000ff00);
1548 tcg_gen_or_i32(t0, t0, t1);
1549
1550 tcg_gen_shri_i32(t1, arg, 24);
1551 tcg_gen_or_i32(ret, t0, t1);
1552 tcg_temp_free_i32(t0);
1553 tcg_temp_free_i32(t1);
1554 }
1555 }
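/* Byte movement in the fallback above, with arg = [b3 b2 b1 b0] (b3 most
 * significant): b0 is shifted up into byte 3, b1 is masked and shifted up
 * into byte 2, b2 is shifted down and masked into byte 1, and b3 is shifted
 * down into byte 0, giving ret = [b0 b1 b2 b3]. */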
1556
1557 #if TCG_TARGET_REG_BITS == 32
1558 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1559 {
1560 tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1561 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1562 }
1563
1564 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1565 {
1566 tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1567 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1568 }
1569
1570 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1571 {
1572 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1573 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1574 }
1575
1576 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1577 {
1578 tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1579 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1580 }
1581
1582 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1583 {
1584 tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1585 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1586 }
1587
1588 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1589 {
1590 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1591 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1592 }
1593
1594 static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
1595 {
1596 tcg_gen_mov_i32(ret, TCGV_LOW(arg));
1597 }
1598
1599 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1600 {
1601 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
1602 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1603 }
1604
1605 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1606 {
1607 tcg_gen_mov_i32(TCGV_LOW(ret), arg);
1608 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1609 }
1610
1611 /* Note: we assume the six high bytes are set to zero */
1612 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
1613 {
1614 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1615 tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1616 }
1617
1618 /* Note: we assume the four high bytes are set to zero */
1619 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
1620 {
1621 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1622 tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1623 }
1624
1625 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1626 {
1627 TCGv_i32 t0, t1;
1628 t0 = tcg_temp_new_i32();
1629 t1 = tcg_temp_new_i32();
1630
1631 tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
1632 tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
1633 tcg_gen_mov_i32(TCGV_LOW(ret), t1);
1634 tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
1635 tcg_temp_free_i32(t0);
1636 tcg_temp_free_i32(t1);
1637 }
1638 #else
1639
1640 static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1641 {
1642 if (TCG_TARGET_HAS_ext8s_i64) {
1643 tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
1644 } else {
1645 tcg_gen_shli_i64(ret, arg, 56);
1646 tcg_gen_sari_i64(ret, ret, 56);
1647 }
1648 }
1649
1650 static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1651 {
1652 if (TCG_TARGET_HAS_ext16s_i64) {
1653 tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
1654 } else {
1655 tcg_gen_shli_i64(ret, arg, 48);
1656 tcg_gen_sari_i64(ret, ret, 48);
1657 }
1658 }
1659
1660 static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1661 {
1662 if (TCG_TARGET_HAS_ext32s_i64) {
1663 tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
1664 } else {
1665 tcg_gen_shli_i64(ret, arg, 32);
1666 tcg_gen_sari_i64(ret, ret, 32);
1667 }
1668 }
1669
1670 static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1671 {
1672 if (TCG_TARGET_HAS_ext8u_i64) {
1673 tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
1674 } else {
1675 tcg_gen_andi_i64(ret, arg, 0xffu);
1676 }
1677 }
1678
1679 static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1680 {
1681 if (TCG_TARGET_HAS_ext16u_i64) {
1682 tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
1683 } else {
1684 tcg_gen_andi_i64(ret, arg, 0xffffu);
1685 }
1686 }
1687
1688 static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1689 {
1690 if (TCG_TARGET_HAS_ext32u_i64) {
1691 tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
1692 } else {
1693 tcg_gen_andi_i64(ret, arg, 0xffffffffu);
1694 }
1695 }
1696
1697 /* Note: we assume the target supports move between 32 and 64 bit
1698 registers. This will probably break MIPS64 targets. */
1699 static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
1700 {
1701 tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
1702 }
1703
1704 /* Note: we assume the target supports move between 32 and 64 bit
1705 registers */
1706 static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1707 {
1708 tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
1709 }
1710
1711 /* Note: we assume the target supports move between 32 and 64 bit
1712 registers */
1713 static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
1714 {
1715 tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
1716 }
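/* Descriptive note on the three conversions above (inferred from the
 * GET_TCGV/MAKE_TCGV reinterpretation): on 64-bit hosts a TCGv_i32 and a
 * TCGv_i64 carrying the same index name the same underlying temporary, so
 * truncation is just a 32-bit mov of the reinterpreted value and widening is
 * the corresponding explicit zero or sign extension. */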
1717
1718 /* Note: we assume the six high bytes are set to zero */
1719 static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
1720 {
1721 if (TCG_TARGET_HAS_bswap16_i64) {
1722 tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
1723 } else {
1724 TCGv_i64 t0 = tcg_temp_new_i64();
1725
1726 tcg_gen_ext8u_i64(t0, arg);
1727 tcg_gen_shli_i64(t0, t0, 8);
1728 tcg_gen_shri_i64(ret, arg, 8);
1729 tcg_gen_or_i64(ret, ret, t0);
1730 tcg_temp_free_i64(t0);
1731 }
1732 }
1733
1734 /* Note: we assume the four high bytes are set to zero */
1735 static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
1736 {
1737 if (TCG_TARGET_HAS_bswap32_i64) {
1738 tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
1739 } else {
1740 TCGv_i64 t0, t1;
1741 t0 = tcg_temp_new_i64();
1742 t1 = tcg_temp_new_i64();
1743
1744 tcg_gen_shli_i64(t0, arg, 24);
1745 tcg_gen_ext32u_i64(t0, t0);
1746
1747 tcg_gen_andi_i64(t1, arg, 0x0000ff00);
1748 tcg_gen_shli_i64(t1, t1, 8);
1749 tcg_gen_or_i64(t0, t0, t1);
1750
1751 tcg_gen_shri_i64(t1, arg, 8);
1752 tcg_gen_andi_i64(t1, t1, 0x0000ff00);
1753 tcg_gen_or_i64(t0, t0, t1);
1754
1755 tcg_gen_shri_i64(t1, arg, 24);
1756 tcg_gen_or_i64(ret, t0, t1);
1757 tcg_temp_free_i64(t0);
1758 tcg_temp_free_i64(t1);
1759 }
1760 }
1761
1762 static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1763 {
1764 if (TCG_TARGET_HAS_bswap64_i64) {
1765 tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
1766 } else {
1767 TCGv_i64 t0 = tcg_temp_new_i64();
1768 TCGv_i64 t1 = tcg_temp_new_i64();
1769
1770 tcg_gen_shli_i64(t0, arg, 56);
1771
1772 tcg_gen_andi_i64(t1, arg, 0x0000ff00);
1773 tcg_gen_shli_i64(t1, t1, 40);
1774 tcg_gen_or_i64(t0, t0, t1);
1775
1776 tcg_gen_andi_i64(t1, arg, 0x00ff0000);
1777 tcg_gen_shli_i64(t1, t1, 24);
1778 tcg_gen_or_i64(t0, t0, t1);
1779
1780 tcg_gen_andi_i64(t1, arg, 0xff000000);
1781 tcg_gen_shli_i64(t1, t1, 8);
1782 tcg_gen_or_i64(t0, t0, t1);
1783
1784 tcg_gen_shri_i64(t1, arg, 8);
1785 tcg_gen_andi_i64(t1, t1, 0xff000000);
1786 tcg_gen_or_i64(t0, t0, t1);
1787
1788 tcg_gen_shri_i64(t1, arg, 24);
1789 tcg_gen_andi_i64(t1, t1, 0x00ff0000);
1790 tcg_gen_or_i64(t0, t0, t1);
1791
1792 tcg_gen_shri_i64(t1, arg, 40);
1793 tcg_gen_andi_i64(t1, t1, 0x0000ff00);
1794 tcg_gen_or_i64(t0, t0, t1);
1795
1796 tcg_gen_shri_i64(t1, arg, 56);
1797 tcg_gen_or_i64(ret, t0, t1);
1798 tcg_temp_free_i64(t0);
1799 tcg_temp_free_i64(t1);
1800 }
1801 }
1802
1803 #endif
1804
1805 static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
1806 {
1807 if (TCG_TARGET_HAS_neg_i32) {
1808 tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
1809 } else {
1810 TCGv_i32 t0 = tcg_const_i32(0);
1811 tcg_gen_sub_i32(ret, t0, arg);
1812 tcg_temp_free_i32(t0);
1813 }
1814 }
1815
1816 static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
1817 {
1818 if (TCG_TARGET_HAS_neg_i64) {
1819 tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
1820 } else {
1821 TCGv_i64 t0 = tcg_const_i64(0);
1822 tcg_gen_sub_i64(ret, t0, arg);
1823 tcg_temp_free_i64(t0);
1824 }
1825 }
1826
1827 static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
1828 {
1829 if (TCG_TARGET_HAS_not_i32) {
1830 tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
1831 } else {
1832 tcg_gen_xori_i32(ret, arg, -1);
1833 }
1834 }
1835
1836 static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1837 {
1838 #if TCG_TARGET_REG_BITS == 64
1839 if (TCG_TARGET_HAS_not_i64) {
1840 tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1841 } else {
1842 tcg_gen_xori_i64(ret, arg, -1);
1843 }
1844 #else
1845 tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1846 tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1847 #endif
1848 }
1849
1850 static inline void tcg_gen_discard_i32(TCGv_i32 arg)
1851 {
1852 tcg_gen_op1_i32(INDEX_op_discard, arg);
1853 }
1854
1855 static inline void tcg_gen_discard_i64(TCGv_i64 arg)
1856 {
1857 #if TCG_TARGET_REG_BITS == 32
1858 tcg_gen_discard_i32(TCGV_LOW(arg));
1859 tcg_gen_discard_i32(TCGV_HIGH(arg));
1860 #else
1861 tcg_gen_op1_i64(INDEX_op_discard, arg);
1862 #endif
1863 }
1864
1865 static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1866 {
1867 if (TCG_TARGET_HAS_andc_i32) {
1868 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
1869 } else {
1870 TCGv_i32 t0 = tcg_temp_new_i32();
1871 tcg_gen_not_i32(t0, arg2);
1872 tcg_gen_and_i32(ret, arg1, t0);
1873 tcg_temp_free_i32(t0);
1874 }
1875 }
1876
1877 static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1878 {
1879 #if TCG_TARGET_REG_BITS == 64
1880 if (TCG_TARGET_HAS_andc_i64) {
1881 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1882 } else {
1883 TCGv_i64 t0 = tcg_temp_new_i64();
1884 tcg_gen_not_i64(t0, arg2);
1885 tcg_gen_and_i64(ret, arg1, t0);
1886 tcg_temp_free_i64(t0);
1887 }
1888 #else
1889 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1890 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1891 #endif
1892 }
1893
1894 static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1895 {
1896 if (TCG_TARGET_HAS_eqv_i32) {
1897 tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
1898 } else {
1899 tcg_gen_xor_i32(ret, arg1, arg2);
1900 tcg_gen_not_i32(ret, ret);
1901 }
1902 }
1903
1904 static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1905 {
1906 #if TCG_TARGET_REG_BITS == 64
1907 if (TCG_TARGET_HAS_eqv_i64) {
1908 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1909 } else {
1910 tcg_gen_xor_i64(ret, arg1, arg2);
1911 tcg_gen_not_i64(ret, ret);
1912 }
1913 #else
1914 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1915 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1916 #endif
1917 }
1918
1919 static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1920 {
1921 if (TCG_TARGET_HAS_nand_i32) {
1922 tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
1923 } else {
1924 tcg_gen_and_i32(ret, arg1, arg2);
1925 tcg_gen_not_i32(ret, ret);
1926 }
1927 }
1928
1929 static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1930 {
1931 #if TCG_TARGET_REG_BITS == 64
1932 if (TCG_TARGET_HAS_nand_i64) {
1933 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1934 } else {
1935 tcg_gen_and_i64(ret, arg1, arg2);
1936 tcg_gen_not_i64(ret, ret);
1937 }
1938 #else
1939 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1940 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1941 #endif
1942 }
1943
1944 static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1945 {
1946 if (TCG_TARGET_HAS_nor_i32) {
1947 tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
1948 } else {
1949 tcg_gen_or_i32(ret, arg1, arg2);
1950 tcg_gen_not_i32(ret, ret);
1951 }
1952 }
1953
1954 static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1955 {
1956 #if TCG_TARGET_REG_BITS == 64
1957 if (TCG_TARGET_HAS_nor_i64) {
1958 tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1959 } else {
1960 tcg_gen_or_i64(ret, arg1, arg2);
1961 tcg_gen_not_i64(ret, ret);
1962 }
1963 #else
1964 tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1965 tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1966 #endif
1967 }
1968
1969 static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1970 {
1971 if (TCG_TARGET_HAS_orc_i32) {
1972 tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
1973 } else {
1974 TCGv_i32 t0 = tcg_temp_new_i32();
1975 tcg_gen_not_i32(t0, arg2);
1976 tcg_gen_or_i32(ret, arg1, t0);
1977 tcg_temp_free_i32(t0);
1978 }
1979 }
1980
1981 static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1982 {
1983 #if TCG_TARGET_REG_BITS == 64
1984 if (TCG_TARGET_HAS_orc_i64) {
1985 tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1986 } else {
1987 TCGv_i64 t0 = tcg_temp_new_i64();
1988 tcg_gen_not_i64(t0, arg2);
1989 tcg_gen_or_i64(ret, arg1, t0);
1990 tcg_temp_free_i64(t0);
1991 }
1992 #else
1993 tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1994 tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1995 #endif
1996 }
1997
1998 static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1999 {
2000 if (TCG_TARGET_HAS_rot_i32) {
2001 tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
2002 } else {
2003 TCGv_i32 t0, t1;
2004
2005 t0 = tcg_temp_new_i32();
2006 t1 = tcg_temp_new_i32();
2007 tcg_gen_shl_i32(t0, arg1, arg2);
2008 tcg_gen_subfi_i32(t1, 32, arg2);
2009 tcg_gen_shr_i32(t1, arg1, t1);
2010 tcg_gen_or_i32(ret, t0, t1);
2011 tcg_temp_free_i32(t0);
2012 tcg_temp_free_i32(t1);
2013 }
2014 }
2015
2016 static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2017 {
2018 if (TCG_TARGET_HAS_rot_i64) {
2019 tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
2020 } else {
2021 TCGv_i64 t0, t1;
2022 t0 = tcg_temp_new_i64();
2023 t1 = tcg_temp_new_i64();
2024 tcg_gen_shl_i64(t0, arg1, arg2);
2025 tcg_gen_subfi_i64(t1, 64, arg2);
2026 tcg_gen_shr_i64(t1, arg1, t1);
2027 tcg_gen_or_i64(ret, t0, t1);
2028 tcg_temp_free_i64(t0);
2029 tcg_temp_free_i64(t1);
2030 }
2031 }
2032
2033 static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
2034 {
2035 /* some cases can be optimized here */
2036 if (arg2 == 0) {
2037 tcg_gen_mov_i32(ret, arg1);
2038 } else if (TCG_TARGET_HAS_rot_i32) {
2039 TCGv_i32 t0 = tcg_const_i32(arg2);
2040 tcg_gen_rotl_i32(ret, arg1, t0);
2041 tcg_temp_free_i32(t0);
2042 } else {
2043 TCGv_i32 t0, t1;
2044 t0 = tcg_temp_new_i32();
2045 t1 = tcg_temp_new_i32();
2046 tcg_gen_shli_i32(t0, arg1, arg2);
2047 tcg_gen_shri_i32(t1, arg1, 32 - arg2);
2048 tcg_gen_or_i32(ret, t0, t1);
2049 tcg_temp_free_i32(t0);
2050 tcg_temp_free_i32(t1);
2051 }
2052 }
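/* Illustrative expansion: without a rotate instruction, the constant rotate
   above is built from two shifts and an OR.  For a hypothetical 32-bit
   temporary t0, rotating left by 8:

       tcg_gen_rotli_i32(t0, t0, 8);
       // expands (roughly) to: ret = (t0 << 8) | (t0 >> 24)

   A rotate right by a constant is handled below by tcg_gen_rotri_i32, which
   simply rotates left by (32 - arg2).
*/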
2053
2054 static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2055 {
2056 /* some cases can be optimized here */
2057 if (arg2 == 0) {
2058 tcg_gen_mov_i64(ret, arg1);
2059 } else if (TCG_TARGET_HAS_rot_i64) {
2060 TCGv_i64 t0 = tcg_const_i64(arg2);
2061 tcg_gen_rotl_i64(ret, arg1, t0);
2062 tcg_temp_free_i64(t0);
2063 } else {
2064 TCGv_i64 t0, t1;
2065 t0 = tcg_temp_new_i64();
2066 t1 = tcg_temp_new_i64();
2067 tcg_gen_shli_i64(t0, arg1, arg2);
2068 tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2069 tcg_gen_or_i64(ret, t0, t1);
2070 tcg_temp_free_i64(t0);
2071 tcg_temp_free_i64(t1);
2072 }
2073 }
2074
2075 static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
2076 {
2077 if (TCG_TARGET_HAS_rot_i32) {
2078 tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
2079 } else {
2080 TCGv_i32 t0, t1;
2081
2082 t0 = tcg_temp_new_i32();
2083 t1 = tcg_temp_new_i32();
2084 tcg_gen_shr_i32(t0, arg1, arg2);
2085 tcg_gen_subfi_i32(t1, 32, arg2);
2086 tcg_gen_shl_i32(t1, arg1, t1);
2087 tcg_gen_or_i32(ret, t0, t1);
2088 tcg_temp_free_i32(t0);
2089 tcg_temp_free_i32(t1);
2090 }
2091 }
2092
2093 static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2094 {
2095 if (TCG_TARGET_HAS_rot_i64) {
2096 tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2097 } else {
2098 TCGv_i64 t0, t1;
2099 t0 = tcg_temp_new_i64();
2100 t1 = tcg_temp_new_i64();
2101 tcg_gen_shr_i64(t0, arg1, arg2);
2102 tcg_gen_subfi_i64(t1, 64, arg2);
2103 tcg_gen_shl_i64(t1, arg1, t1);
2104 tcg_gen_or_i64(ret, t0, t1);
2105 tcg_temp_free_i64(t0);
2106 tcg_temp_free_i64(t1);
2107 }
2108 }
2109
2110 static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
2111 {
2112 /* some cases can be optimized here */
2113 if (arg2 == 0) {
2114 tcg_gen_mov_i32(ret, arg1);
2115 } else {
2116 tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
2117 }
2118 }
2119
2120 static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2121 {
2122 /* some cases can be optimized here */
2123 if (arg2 == 0) {
2124 tcg_gen_mov_i64(ret, arg1);
2125 } else {
2126 tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2127 }
2128 }
2129
2130 static inline void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1,
2131 TCGv_i32 arg2, unsigned int ofs,
2132 unsigned int len)
2133 {
2134 uint32_t mask;
2135 TCGv_i32 t1;
2136
2137 tcg_debug_assert(ofs < 32);
2138 tcg_debug_assert(len <= 32);
2139 tcg_debug_assert(ofs + len <= 32);
2140
2141 if (ofs == 0 && len == 32) {
2142 tcg_gen_mov_i32(ret, arg2);
2143 return;
2144 }
2145 if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
2146 tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
2147 return;
2148 }
2149
2150 mask = (1u << len) - 1;
2151 t1 = tcg_temp_new_i32();
2152
2153 if (ofs + len < 32) {
2154 tcg_gen_andi_i32(t1, arg2, mask);
2155 tcg_gen_shli_i32(t1, t1, ofs);
2156 } else {
2157 tcg_gen_shli_i32(t1, arg2, ofs);
2158 }
2159 tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
2160 tcg_gen_or_i32(ret, ret, t1);
2161
2162 tcg_temp_free_i32(t1);
2163 }
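/* Worked example of the generic fallback: depositing the low 8 bits of arg2
   into bits [15:8] of arg1, i.e. tcg_gen_deposit_i32(ret, a, b, 8, 8) with
   hypothetical temporaries a and b, computes

       mask = (1u << 8) - 1             = 0x000000ff
       t1   = (b & mask) << 8
       ret  = (a & ~(mask << 8)) | t1   // a with byte 1 replaced by b[7:0]
*/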
2164
2165 static inline void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1,
2166 TCGv_i64 arg2, unsigned int ofs,
2167 unsigned int len)
2168 {
2169 uint64_t mask;
2170 TCGv_i64 t1;
2171
2172 tcg_debug_assert(ofs < 64);
2173 tcg_debug_assert(len <= 64);
2174 tcg_debug_assert(ofs + len <= 64);
2175
2176 if (ofs == 0 && len == 64) {
2177 tcg_gen_mov_i64(ret, arg2);
2178 return;
2179 }
2180 if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2181 tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2182 return;
2183 }
2184
2185 #if TCG_TARGET_REG_BITS == 32
2186 if (ofs >= 32) {
2187 tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2188 tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2189 TCGV_LOW(arg2), ofs - 32, len);
2190 return;
2191 }
2192 if (ofs + len <= 32) {
2193 tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2194 TCGV_LOW(arg2), ofs, len);
2195 tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2196 return;
2197 }
2198 #endif
2199
2200 mask = (1ull << len) - 1;
2201 t1 = tcg_temp_new_i64();
2202
2203 if (ofs + len < 64) {
2204 tcg_gen_andi_i64(t1, arg2, mask);
2205 tcg_gen_shli_i64(t1, t1, ofs);
2206 } else {
2207 tcg_gen_shli_i64(t1, arg2, ofs);
2208 }
2209 tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2210 tcg_gen_or_i64(ret, ret, t1);
2211
2212 tcg_temp_free_i64(t1);
2213 }
2214
2215 static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low,
2216 TCGv_i32 high)
2217 {
2218 #if TCG_TARGET_REG_BITS == 32
2219 tcg_gen_mov_i32(TCGV_LOW(dest), low);
2220 tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2221 #else
2222 TCGv_i64 tmp = tcg_temp_new_i64();
2223 /* These extensions are only needed for type correctness.
2224 We may be able to do better given target-specific information. */
2225 tcg_gen_extu_i32_i64(tmp, high);
2226 tcg_gen_extu_i32_i64(dest, low);
2227 /* If deposit is available, use it. Otherwise use the extra
2228 knowledge that we have of the zero-extensions above. */
2229 if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2230 tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2231 } else {
2232 tcg_gen_shli_i64(tmp, tmp, 32);
2233 tcg_gen_or_i64(dest, dest, tmp);
2234 }
2235 tcg_temp_free_i64(tmp);
2236 #endif
2237 }
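/* Usage sketch: combining two 32-bit halves into one 64-bit value, e.g. to
   rebuild a guest 64-bit register from hypothetical temporaries lo and hi:

       tcg_gen_concat_i32_i64(dest, lo, hi);
       // dest = ((uint64_t)hi << 32) | (uint32_t)lo
*/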
2238
2239 static inline void tcg_gen_concat32_i64(TCGv_i64 dest, TCGv_i64 low,
2240 TCGv_i64 high)
2241 {
2242 tcg_gen_deposit_i64(dest, low, high, 32, 32);
2243 }
2244
2245 static inline void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret,
2246 TCGv_i32 c1, TCGv_i32 c2,
2247 TCGv_i32 v1, TCGv_i32 v2)
2248 {
2249 if (TCG_TARGET_HAS_movcond_i32) {
2250 tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
2251 } else {
2252 TCGv_i32 t0 = tcg_temp_new_i32();
2253 TCGv_i32 t1 = tcg_temp_new_i32();
2254 tcg_gen_setcond_i32(cond, t0, c1, c2);
2255 tcg_gen_neg_i32(t0, t0);
2256 tcg_gen_and_i32(t1, v1, t0);
2257 tcg_gen_andc_i32(ret, v2, t0);
2258 tcg_gen_or_i32(ret, ret, t1);
2259 tcg_temp_free_i32(t0);
2260 tcg_temp_free_i32(t1);
2261 }
2262 }
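/* Sketch of the branch-free fallback above: the setcond result (0 or 1) is
   negated into an all-zeros/all-ones mask, which then selects between v1
   and v2.  The expansion computes

       t0  = -(c1 cond c2)              // 0x00000000 or 0xffffffff
       ret = (v1 & t0) | (v2 & ~t0)

   so ret == v1 when the condition holds and ret == v2 otherwise, without
   emitting a branch.
*/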
2263
2264 static inline void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret,
2265 TCGv_i64 c1, TCGv_i64 c2,
2266 TCGv_i64 v1, TCGv_i64 v2)
2267 {
2268 #if TCG_TARGET_REG_BITS == 32
2269 TCGv_i32 t0 = tcg_temp_new_i32();
2270 TCGv_i32 t1 = tcg_temp_new_i32();
2271 tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
2272 TCGV_LOW(c1), TCGV_HIGH(c1),
2273 TCGV_LOW(c2), TCGV_HIGH(c2), cond);
2274
2275 if (TCG_TARGET_HAS_movcond_i32) {
2276 tcg_gen_movi_i32(t1, 0);
2277 tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
2278 TCGV_LOW(v1), TCGV_LOW(v2));
2279 tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
2280 TCGV_HIGH(v1), TCGV_HIGH(v2));
2281 } else {
2282 tcg_gen_neg_i32(t0, t0);
2283
2284 tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
2285 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
2286 tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);
2287
2288 tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
2289 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
2290 tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
2291 }
2292 tcg_temp_free_i32(t0);
2293 tcg_temp_free_i32(t1);
2294 #else
2295 if (TCG_TARGET_HAS_movcond_i64) {
2296 tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
2297 } else {
2298 TCGv_i64 t0 = tcg_temp_new_i64();
2299 TCGv_i64 t1 = tcg_temp_new_i64();
2300 tcg_gen_setcond_i64(cond, t0, c1, c2);
2301 tcg_gen_neg_i64(t0, t0);
2302 tcg_gen_and_i64(t1, v1, t0);
2303 tcg_gen_andc_i64(ret, v2, t0);
2304 tcg_gen_or_i64(ret, ret, t1);
2305 tcg_temp_free_i64(t0);
2306 tcg_temp_free_i64(t1);
2307 }
2308 #endif
2309 }
2310
2311 /***************************************/
2312 /* QEMU-specific operations.  Their types depend on the QEMU CPU
2313 type. */
2314 #ifndef TARGET_LONG_BITS
2315 #error must include QEMU headers
2316 #endif
2317
2318 #if TARGET_LONG_BITS == 32
2319 #define TCGv TCGv_i32
2320 #define tcg_temp_new() tcg_temp_new_i32()
2321 #define tcg_global_reg_new tcg_global_reg_new_i32
2322 #define tcg_global_mem_new tcg_global_mem_new_i32
2323 #define tcg_temp_local_new() tcg_temp_local_new_i32()
2324 #define tcg_temp_free tcg_temp_free_i32
2325 #define tcg_gen_qemu_ldst_op tcg_gen_op3i_i32
2326 #define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i32
2327 #define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
2328 #define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
2329 #else
2330 #define TCGv TCGv_i64
2331 #define tcg_temp_new() tcg_temp_new_i64()
2332 #define tcg_global_reg_new tcg_global_reg_new_i64
2333 #define tcg_global_mem_new tcg_global_mem_new_i64
2334 #define tcg_temp_local_new() tcg_temp_local_new_i64()
2335 #define tcg_temp_free tcg_temp_free_i64
2336 #define tcg_gen_qemu_ldst_op tcg_gen_op3i_i64
2337 #define tcg_gen_qemu_ldst_op_i64 tcg_gen_qemu_ldst_op_i64_i64
2338 #define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
2339 #define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
2340 #endif
2341
2342 /* debug info: write the PC of the corresponding QEMU CPU instruction */
2343 static inline void tcg_gen_debug_insn_start(uint64_t pc)
2344 {
2345 /* XXX: must really use a 32 bit size for TCGArg in all cases */
2346 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
2347 tcg_gen_op2ii(INDEX_op_debug_insn_start,
2348 (uint32_t)(pc), (uint32_t)(pc >> 32));
2349 #else
2350 tcg_gen_op1i(INDEX_op_debug_insn_start, pc);
2351 #endif
2352 }
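/* Usage sketch (hypothetical translator code): target front ends call this
   once per guest instruction so that op dumps can be correlated with guest
   PCs, typically guarded so nothing is emitted when logging is off:

       if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
           tcg_gen_debug_insn_start(dc->pc);   // dc->pc: assumed DisasContext field
       }
*/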
2353
2354 static inline void tcg_gen_exit_tb(tcg_target_long val)
2355 {
2356 tcg_gen_op1i(INDEX_op_exit_tb, val);
2357 }
2358
2359 static inline void tcg_gen_goto_tb(unsigned idx)
2360 {
2361 /* We only support two chained exits. */
2362 tcg_debug_assert(idx <= 1);
2363 #ifdef CONFIG_DEBUG_TCG
2364 /* Verify that we haven't seen this numbered exit before. */
2365 tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
2366 tcg_ctx.goto_tb_issue_mask |= 1 << idx;
2367 #endif
2368 tcg_gen_op1i(INDEX_op_goto_tb, idx);
2369 }
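/* Usage sketch: goto_tb marks a chained exit and is paired with an exit_tb
   whose low bits carry the same slot index, as in this hypothetical
   direct-jump helper from a target translator (cpu_pc and dc->tb are
   assumed target-specific names):

       static void gen_goto_tb(DisasContext *dc, int n, target_ulong dest)
       {
           tcg_gen_goto_tb(n);
           tcg_gen_movi_tl(cpu_pc, dest);
           tcg_gen_exit_tb((tcg_target_long)dc->tb + n);
       }
*/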
2370
2371 #if TCG_TARGET_REG_BITS == 32
2372 static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
2373 {
2374 #if TARGET_LONG_BITS == 32
2375 tcg_gen_op3i_i32(INDEX_op_qemu_ld8u, ret, addr, mem_index);
2376 #else
2377 tcg_gen_op4i_i32(INDEX_op_qemu_ld8u, TCGV_LOW(ret), TCGV_LOW(addr),
2378 TCGV_HIGH(addr), mem_index);
2379 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2380 #endif
2381 }
2382
2383 static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
2384 {
2385 #if TARGET_LONG_BITS == 32
2386 tcg_gen_op3i_i32(INDEX_op_qemu_ld8s, ret, addr, mem_index);
2387 #else
2388 tcg_gen_op4i_i32(INDEX_op_qemu_ld8s, TCGV_LOW(ret), TCGV_LOW(addr),
2389 TCGV_HIGH(addr), mem_index);
2390 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2391 #endif
2392 }
2393
2394 static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
2395 {
2396 #if TARGET_LONG_BITS == 32
2397 tcg_gen_op3i_i32(INDEX_op_qemu_ld16u, ret, addr, mem_index);
2398 #else
2399 tcg_gen_op4i_i32(INDEX_op_qemu_ld16u, TCGV_LOW(ret), TCGV_LOW(addr),
2400 TCGV_HIGH(addr), mem_index);
2401 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2402 #endif
2403 }
2404
2405 static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
2406 {
2407 #if TARGET_LONG_BITS == 32
2408 tcg_gen_op3i_i32(INDEX_op_qemu_ld16s, ret, addr, mem_index);
2409 #else
2410 tcg_gen_op4i_i32(INDEX_op_qemu_ld16s, TCGV_LOW(ret), TCGV_LOW(addr),
2411 TCGV_HIGH(addr), mem_index);
2412 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2413 #endif
2414 }
2415
2416 static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
2417 {
2418 #if TARGET_LONG_BITS == 32
2419 tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
2420 #else
2421 tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
2422 TCGV_HIGH(addr), mem_index);
2423 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2424 #endif
2425 }
2426
2427 static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
2428 {
2429 #if TARGET_LONG_BITS == 32
2430 tcg_gen_op3i_i32(INDEX_op_qemu_ld32, ret, addr, mem_index);
2431 #else
2432 tcg_gen_op4i_i32(INDEX_op_qemu_ld32, TCGV_LOW(ret), TCGV_LOW(addr),
2433 TCGV_HIGH(addr), mem_index);
2434 tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2435 #endif
2436 }
2437
2438 static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
2439 {
2440 #if TARGET_LONG_BITS == 32
2441 tcg_gen_op4i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret), addr, mem_index);
2442 #else
2443 tcg_gen_op5i_i32(INDEX_op_qemu_ld64, TCGV_LOW(ret), TCGV_HIGH(ret),
2444 TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
2445 #endif
2446 }
2447
2448 static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
2449 {
2450 #if TARGET_LONG_BITS == 32
2451 tcg_gen_op3i_i32(INDEX_op_qemu_st8, arg, addr, mem_index);
2452 #else
2453 tcg_gen_op4i_i32(INDEX_op_qemu_st8, TCGV_LOW(arg), TCGV_LOW(addr),
2454 TCGV_HIGH(addr), mem_index);
2455 #endif
2456 }
2457
2458 static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
2459 {
2460 #if TARGET_LONG_BITS == 32
2461 tcg_gen_op3i_i32(INDEX_op_qemu_st16, arg, addr, mem_index);
2462 #else
2463 tcg_gen_op4i_i32(INDEX_op_qemu_st16, TCGV_LOW(arg), TCGV_LOW(addr),
2464 TCGV_HIGH(addr), mem_index);
2465 #endif
2466 }
2467
2468 static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
2469 {
2470 #if TARGET_LONG_BITS == 32
2471 tcg_gen_op3i_i32(INDEX_op_qemu_st32, arg, addr, mem_index);
2472 #else
2473 tcg_gen_op4i_i32(INDEX_op_qemu_st32, TCGV_LOW(arg), TCGV_LOW(addr),
2474 TCGV_HIGH(addr), mem_index);
2475 #endif
2476 }
2477
2478 static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
2479 {
2480 #if TARGET_LONG_BITS == 32
2481 tcg_gen_op4i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg), addr,
2482 mem_index);
2483 #else
2484 tcg_gen_op5i_i32(INDEX_op_qemu_st64, TCGV_LOW(arg), TCGV_HIGH(arg),
2485 TCGV_LOW(addr), TCGV_HIGH(addr), mem_index);
2486 #endif
2487 }
2488
2489 #define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
2490 #define tcg_gen_discard_ptr(A) tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
2491
2492 #else /* TCG_TARGET_REG_BITS == 32 */
2493
2494 static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
2495 {
2496 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8u, ret, addr, mem_index);
2497 }
2498
2499 static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
2500 {
2501 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld8s, ret, addr, mem_index);
2502 }
2503
2504 static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
2505 {
2506 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16u, ret, addr, mem_index);
2507 }
2508
2509 static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
2510 {
2511 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld16s, ret, addr, mem_index);
2512 }
2513
2514 static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
2515 {
2516 #if TARGET_LONG_BITS == 32
2517 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
2518 #else
2519 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32u, ret, addr, mem_index);
2520 #endif
2521 }
2522
2523 static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
2524 {
2525 #if TARGET_LONG_BITS == 32
2526 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32, ret, addr, mem_index);
2527 #else
2528 tcg_gen_qemu_ldst_op(INDEX_op_qemu_ld32s, ret, addr, mem_index);
2529 #endif
2530 }
2531
2532 static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
2533 {
2534 tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_ld64, ret, addr, mem_index);
2535 }
2536
2537 static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
2538 {
2539 tcg_gen_qemu_ldst_op(INDEX_op_qemu_st8, arg, addr, mem_index);
2540 }
2541
2542 static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
2543 {
2544 tcg_gen_qemu_ldst_op(INDEX_op_qemu_st16, arg, addr, mem_index);
2545 }
2546
2547 static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
2548 {
2549 tcg_gen_qemu_ldst_op(INDEX_op_qemu_st32, arg, addr, mem_index);
2550 }
2551
2552 static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
2553 {
2554 tcg_gen_qemu_ldst_op_i64(INDEX_op_qemu_st64, arg, addr, mem_index);
2555 }
2556
2557 #define tcg_gen_ld_ptr(R, A, O) tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
2558 #define tcg_gen_discard_ptr(A) tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
2559
2560 #endif /* TCG_TARGET_REG_BITS != 32 */
2561
2562 #if TARGET_LONG_BITS == 64
2563 #define tcg_gen_movi_tl tcg_gen_movi_i64
2564 #define tcg_gen_mov_tl tcg_gen_mov_i64
2565 #define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
2566 #define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
2567 #define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
2568 #define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
2569 #define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
2570 #define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
2571 #define tcg_gen_ld_tl tcg_gen_ld_i64
2572 #define tcg_gen_st8_tl tcg_gen_st8_i64
2573 #define tcg_gen_st16_tl tcg_gen_st16_i64
2574 #define tcg_gen_st32_tl tcg_gen_st32_i64
2575 #define tcg_gen_st_tl tcg_gen_st_i64
2576 #define tcg_gen_add_tl tcg_gen_add_i64
2577 #define tcg_gen_addi_tl tcg_gen_addi_i64
2578 #define tcg_gen_sub_tl tcg_gen_sub_i64
2579 #define tcg_gen_neg_tl tcg_gen_neg_i64
2580 #define tcg_gen_subfi_tl tcg_gen_subfi_i64
2581 #define tcg_gen_subi_tl tcg_gen_subi_i64
2582 #define tcg_gen_and_tl tcg_gen_and_i64
2583 #define tcg_gen_andi_tl tcg_gen_andi_i64
2584 #define tcg_gen_or_tl tcg_gen_or_i64
2585 #define tcg_gen_ori_tl tcg_gen_ori_i64
2586 #define tcg_gen_xor_tl tcg_gen_xor_i64
2587 #define tcg_gen_xori_tl tcg_gen_xori_i64
2588 #define tcg_gen_not_tl tcg_gen_not_i64
2589 #define tcg_gen_shl_tl tcg_gen_shl_i64
2590 #define tcg_gen_shli_tl tcg_gen_shli_i64
2591 #define tcg_gen_shr_tl tcg_gen_shr_i64
2592 #define tcg_gen_shri_tl tcg_gen_shri_i64
2593 #define tcg_gen_sar_tl tcg_gen_sar_i64
2594 #define tcg_gen_sari_tl tcg_gen_sari_i64
2595 #define tcg_gen_brcond_tl tcg_gen_brcond_i64
2596 #define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
2597 #define tcg_gen_setcond_tl tcg_gen_setcond_i64
2598 #define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
2599 #define tcg_gen_mul_tl tcg_gen_mul_i64
2600 #define tcg_gen_muli_tl tcg_gen_muli_i64
2601 #define tcg_gen_div_tl tcg_gen_div_i64
2602 #define tcg_gen_rem_tl tcg_gen_rem_i64
2603 #define tcg_gen_divu_tl tcg_gen_divu_i64
2604 #define tcg_gen_remu_tl tcg_gen_remu_i64
2605 #define tcg_gen_discard_tl tcg_gen_discard_i64
2606 #define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
2607 #define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
2608 #define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
2609 #define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
2610 #define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
2611 #define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
2612 #define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
2613 #define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
2614 #define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
2615 #define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
2616 #define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
2617 #define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
2618 #define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
2619 #define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
2620 #define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
2621 #define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
2622 #define tcg_gen_andc_tl tcg_gen_andc_i64
2623 #define tcg_gen_eqv_tl tcg_gen_eqv_i64
2624 #define tcg_gen_nand_tl tcg_gen_nand_i64
2625 #define tcg_gen_nor_tl tcg_gen_nor_i64
2626 #define tcg_gen_orc_tl tcg_gen_orc_i64
2627 #define tcg_gen_rotl_tl tcg_gen_rotl_i64
2628 #define tcg_gen_rotli_tl tcg_gen_rotli_i64
2629 #define tcg_gen_rotr_tl tcg_gen_rotr_i64
2630 #define tcg_gen_rotri_tl tcg_gen_rotri_i64
2631 #define tcg_gen_deposit_tl tcg_gen_deposit_i64
2632 #define tcg_const_tl tcg_const_i64
2633 #define tcg_const_local_tl tcg_const_local_i64
2634 #define tcg_gen_movcond_tl tcg_gen_movcond_i64
2635 #else
2636 #define tcg_gen_movi_tl tcg_gen_movi_i32
2637 #define tcg_gen_mov_tl tcg_gen_mov_i32
2638 #define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
2639 #define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
2640 #define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
2641 #define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
2642 #define tcg_gen_ld32u_tl tcg_gen_ld_i32
2643 #define tcg_gen_ld32s_tl tcg_gen_ld_i32
2644 #define tcg_gen_ld_tl tcg_gen_ld_i32
2645 #define tcg_gen_st8_tl tcg_gen_st8_i32
2646 #define tcg_gen_st16_tl tcg_gen_st16_i32
2647 #define tcg_gen_st32_tl tcg_gen_st_i32
2648 #define tcg_gen_st_tl tcg_gen_st_i32
2649 #define tcg_gen_add_tl tcg_gen_add_i32
2650 #define tcg_gen_addi_tl tcg_gen_addi_i32
2651 #define tcg_gen_sub_tl tcg_gen_sub_i32
2652 #define tcg_gen_neg_tl tcg_gen_neg_i32
2653 #define tcg_gen_subfi_tl tcg_gen_subfi_i32
2654 #define tcg_gen_subi_tl tcg_gen_subi_i32
2655 #define tcg_gen_and_tl tcg_gen_and_i32
2656 #define tcg_gen_andi_tl tcg_gen_andi_i32
2657 #define tcg_gen_or_tl tcg_gen_or_i32
2658 #define tcg_gen_ori_tl tcg_gen_ori_i32
2659 #define tcg_gen_xor_tl tcg_gen_xor_i32
2660 #define tcg_gen_xori_tl tcg_gen_xori_i32
2661 #define tcg_gen_not_tl tcg_gen_not_i32
2662 #define tcg_gen_shl_tl tcg_gen_shl_i32
2663 #define tcg_gen_shli_tl tcg_gen_shli_i32
2664 #define tcg_gen_shr_tl tcg_gen_shr_i32
2665 #define tcg_gen_shri_tl tcg_gen_shri_i32
2666 #define tcg_gen_sar_tl tcg_gen_sar_i32
2667 #define tcg_gen_sari_tl tcg_gen_sari_i32
2668 #define tcg_gen_brcond_tl tcg_gen_brcond_i32
2669 #define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
2670 #define tcg_gen_setcond_tl tcg_gen_setcond_i32
2671 #define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
2672 #define tcg_gen_mul_tl tcg_gen_mul_i32
2673 #define tcg_gen_muli_tl tcg_gen_muli_i32
2674 #define tcg_gen_div_tl tcg_gen_div_i32
2675 #define tcg_gen_rem_tl tcg_gen_rem_i32
2676 #define tcg_gen_divu_tl tcg_gen_divu_i32
2677 #define tcg_gen_remu_tl tcg_gen_remu_i32
2678 #define tcg_gen_discard_tl tcg_gen_discard_i32
2679 #define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
2680 #define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
2681 #define tcg_gen_extu_i32_tl tcg_gen_mov_i32
2682 #define tcg_gen_ext_i32_tl tcg_gen_mov_i32
2683 #define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
2684 #define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
2685 #define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
2686 #define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
2687 #define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
2688 #define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
2689 #define tcg_gen_ext32u_tl tcg_gen_mov_i32
2690 #define tcg_gen_ext32s_tl tcg_gen_mov_i32
2691 #define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
2692 #define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
2693 #define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
2694 #define tcg_gen_andc_tl tcg_gen_andc_i32
2695 #define tcg_gen_eqv_tl tcg_gen_eqv_i32
2696 #define tcg_gen_nand_tl tcg_gen_nand_i32
2697 #define tcg_gen_nor_tl tcg_gen_nor_i32
2698 #define tcg_gen_orc_tl tcg_gen_orc_i32
2699 #define tcg_gen_rotl_tl tcg_gen_rotl_i32
2700 #define tcg_gen_rotli_tl tcg_gen_rotli_i32
2701 #define tcg_gen_rotr_tl tcg_gen_rotr_i32
2702 #define tcg_gen_rotri_tl tcg_gen_rotri_i32
2703 #define tcg_gen_deposit_tl tcg_gen_deposit_i32
2704 #define tcg_const_tl tcg_const_i32
2705 #define tcg_const_local_tl tcg_const_local_i32
2706 #define tcg_gen_movcond_tl tcg_gen_movcond_i32
2707 #endif
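/* Usage sketch: translators are written against the _tl aliases above so
   the same code emits 32-bit or 64-bit ops depending on TARGET_LONG_BITS.
   For a hypothetical guest register file cpu_regs[]:

       tcg_gen_addi_tl(cpu_regs[rd], cpu_regs[rs], imm);

   expands to tcg_gen_addi_i64 for 64-bit guests and tcg_gen_addi_i32
   otherwise.
*/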
2708
2709 #if TCG_TARGET_REG_BITS == 32
2710 #define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), \
2711 TCGV_PTR_TO_NAT(A), \
2712 TCGV_PTR_TO_NAT(B))
2713 #define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), \
2714 TCGV_PTR_TO_NAT(A), (B))
2715 #define tcg_gen_ext_i32_ptr(R, A) tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
2716 #else /* TCG_TARGET_REG_BITS == 32 */
2717 #define tcg_gen_add_ptr(R, A, B) tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), \
2718 TCGV_PTR_TO_NAT(A), \
2719 TCGV_PTR_TO_NAT(B))
2720 #define tcg_gen_addi_ptr(R, A, B) tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), \
2721 TCGV_PTR_TO_NAT(A), (B))
2722 #define tcg_gen_ext_i32_ptr(R, A) tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
2723 #endif /* TCG_TARGET_REG_BITS != 32 */