tcg/mips: implement deposit op on MIPS32R2
1 /*
2 * Tiny Code Generator for QEMU
3 *
4 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
5 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
7 *
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
14 *
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24 * THE SOFTWARE.
25 */
26
27 #if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
28 # define TCG_NEED_BSWAP 0
29 #else
30 # define TCG_NEED_BSWAP 1
31 #endif
32
33 #ifndef NDEBUG
34 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
35 "zero",
36 "at",
37 "v0",
38 "v1",
39 "a0",
40 "a1",
41 "a2",
42 "a3",
43 "t0",
44 "t1",
45 "t2",
46 "t3",
47 "t4",
48 "t5",
49 "t6",
50 "t7",
51 "s0",
52 "s1",
53 "s2",
54 "s3",
55 "s4",
56 "s5",
57 "s6",
58 "s7",
59 "t8",
60 "t9",
61 "k0",
62 "k1",
63 "gp",
64 "sp",
65 "fp",
66 "ra",
67 };
68 #endif
69
70 /* check if we really need so many registers :P */
71 static const TCGReg tcg_target_reg_alloc_order[] = {
72 TCG_REG_S0,
73 TCG_REG_S1,
74 TCG_REG_S2,
75 TCG_REG_S3,
76 TCG_REG_S4,
77 TCG_REG_S5,
78 TCG_REG_S6,
79 TCG_REG_S7,
80 TCG_REG_T1,
81 TCG_REG_T2,
82 TCG_REG_T3,
83 TCG_REG_T4,
84 TCG_REG_T5,
85 TCG_REG_T6,
86 TCG_REG_T7,
87 TCG_REG_T8,
88 TCG_REG_T9,
89 TCG_REG_A0,
90 TCG_REG_A1,
91 TCG_REG_A2,
92 TCG_REG_A3,
93 TCG_REG_V0,
94 TCG_REG_V1
95 };
96
97 static const TCGReg tcg_target_call_iarg_regs[4] = {
98 TCG_REG_A0,
99 TCG_REG_A1,
100 TCG_REG_A2,
101 TCG_REG_A3
102 };
103
104 static const TCGReg tcg_target_call_oarg_regs[2] = {
105 TCG_REG_V0,
106 TCG_REG_V1
107 };
108
109 static uint8_t *tb_ret_addr;
110
111 static inline uint32_t reloc_lo16_val (void *pc, tcg_target_long target)
112 {
113 return target & 0xffff;
114 }
115
116 static inline void reloc_lo16 (void *pc, tcg_target_long target)
117 {
118 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
119 | reloc_lo16_val(pc, target);
120 }
121
122 static inline uint32_t reloc_hi16_val (void *pc, tcg_target_long target)
123 {
124 return (target >> 16) & 0xffff;
125 }
126
127 static inline void reloc_hi16 (void *pc, tcg_target_long target)
128 {
129 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
130 | reloc_hi16_val(pc, target);
131 }
132
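/* Branches encode a signed 16-bit offset counted in 32-bit instructions,
   relative to the instruction following the branch (the delay slot), so
   the byte displacement must fit in 18 signed bits. */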
133 static inline uint32_t reloc_pc16_val (void *pc, tcg_target_long target)
134 {
135 int32_t disp;
136
137 disp = target - (tcg_target_long) pc - 4;
138 if (disp != (disp << 14) >> 14) {
139 tcg_abort ();
140 }
141
142 return (disp >> 2) & 0xffff;
143 }
144
145 static inline void reloc_pc16 (void *pc, tcg_target_long target)
146 {
147 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
148 | reloc_pc16_val(pc, target);
149 }
150
151 static inline uint32_t reloc_26_val (void *pc, tcg_target_long target)
152 {
153 if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
154 tcg_abort ();
155 }
156
157 return (target >> 2) & 0x3ffffff;
158 }
159
160 static inline void reloc_pc26 (void *pc, tcg_target_long target)
161 {
162 *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
163 | reloc_26_val(pc, target);
164 }
165
166 static void patch_reloc(uint8_t *code_ptr, int type,
167 tcg_target_long value, tcg_target_long addend)
168 {
169 value += addend;
170 switch(type) {
171 case R_MIPS_LO16:
172 reloc_lo16(code_ptr, value);
173 break;
174 case R_MIPS_HI16:
175 reloc_hi16(code_ptr, value);
176 break;
177 case R_MIPS_PC16:
178 reloc_pc16(code_ptr, value);
179 break;
180 case R_MIPS_26:
181 reloc_pc26(code_ptr, value);
182 break;
183 default:
184 tcg_abort();
185 }
186 }
187
188 /* maximum number of registers used for input function arguments */
189 static inline int tcg_target_get_call_iarg_regs_count(int flags)
190 {
191 return 4;
192 }
193
194 /* parse target specific constraints */
195 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
196 {
197 const char *ct_str;
198
199 ct_str = *pct_str;
200 switch(ct_str[0]) {
201 case 'r':
202 ct->ct |= TCG_CT_REG;
203 tcg_regset_set(ct->u.regs, 0xffffffff);
204 break;
205 case 'C':
206 ct->ct |= TCG_CT_REG;
207 tcg_regset_clear(ct->u.regs);
208 tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
209 break;
210 case 'L': /* qemu_ld output arg constraint */
211 ct->ct |= TCG_CT_REG;
212 tcg_regset_set(ct->u.regs, 0xffffffff);
213 tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
214 break;
215 case 'l': /* qemu_ld input arg constraint */
216 ct->ct |= TCG_CT_REG;
217 tcg_regset_set(ct->u.regs, 0xffffffff);
218 #if defined(CONFIG_SOFTMMU)
219 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
220 # if (TARGET_LONG_BITS == 64)
221 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
222 # endif
223 #endif
224 break;
225 case 'S': /* qemu_st constraint */
226 ct->ct |= TCG_CT_REG;
227 tcg_regset_set(ct->u.regs, 0xffffffff);
228 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
229 #if defined(CONFIG_SOFTMMU)
230 # if (TARGET_LONG_BITS == 32)
231 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
232 # endif
233 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
234 # if TARGET_LONG_BITS == 64
235 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
236 # endif
237 #endif
238 break;
239 case 'I':
240 ct->ct |= TCG_CT_CONST_U16;
241 break;
242 case 'J':
243 ct->ct |= TCG_CT_CONST_S16;
244 break;
245 case 'Z':
246 /* We are cheating a bit here, using the fact that the register
247 ZERO is also the register number 0. Hence there is no need
248 to check for const_args in each instruction. */
249 ct->ct |= TCG_CT_CONST_ZERO;
250 break;
251 default:
252 return -1;
253 }
254 ct_str++;
255 *pct_str = ct_str;
256 return 0;
257 }
258
259 /* test if a constant matches the constraint */
260 static inline int tcg_target_const_match(tcg_target_long val,
261 const TCGArgConstraint *arg_ct)
262 {
263 int ct;
264 ct = arg_ct->ct;
265 if (ct & TCG_CT_CONST)
266 return 1;
267 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
268 return 1;
269 else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
270 return 1;
271 else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
272 return 1;
273 else
274 return 0;
275 }
276
277 /* instruction opcodes */
278 enum {
279 OPC_BEQ = 0x04 << 26,
280 OPC_BNE = 0x05 << 26,
281 OPC_BLEZ = 0x06 << 26,
282 OPC_BGTZ = 0x07 << 26,
283 OPC_ADDIU = 0x09 << 26,
284 OPC_SLTI = 0x0A << 26,
285 OPC_SLTIU = 0x0B << 26,
286 OPC_ANDI = 0x0C << 26,
287 OPC_ORI = 0x0D << 26,
288 OPC_XORI = 0x0E << 26,
289 OPC_LUI = 0x0F << 26,
290 OPC_LB = 0x20 << 26,
291 OPC_LH = 0x21 << 26,
292 OPC_LW = 0x23 << 26,
293 OPC_LBU = 0x24 << 26,
294 OPC_LHU = 0x25 << 26,
295 OPC_LWU = 0x27 << 26,
296 OPC_SB = 0x28 << 26,
297 OPC_SH = 0x29 << 26,
298 OPC_SW = 0x2B << 26,
299
300 OPC_SPECIAL = 0x00 << 26,
301 OPC_SLL = OPC_SPECIAL | 0x00,
302 OPC_SRL = OPC_SPECIAL | 0x02,
303 OPC_ROTR = OPC_SPECIAL | (0x01 << 21) | 0x02,
304 OPC_SRA = OPC_SPECIAL | 0x03,
305 OPC_SLLV = OPC_SPECIAL | 0x04,
306 OPC_SRLV = OPC_SPECIAL | 0x06,
307 OPC_ROTRV = OPC_SPECIAL | (0x01 << 6) | 0x06,
308 OPC_SRAV = OPC_SPECIAL | 0x07,
309 OPC_JR = OPC_SPECIAL | 0x08,
310 OPC_JALR = OPC_SPECIAL | 0x09,
311 OPC_MFHI = OPC_SPECIAL | 0x10,
312 OPC_MFLO = OPC_SPECIAL | 0x12,
313 OPC_MULT = OPC_SPECIAL | 0x18,
314 OPC_MULTU = OPC_SPECIAL | 0x19,
315 OPC_DIV = OPC_SPECIAL | 0x1A,
316 OPC_DIVU = OPC_SPECIAL | 0x1B,
317 OPC_ADDU = OPC_SPECIAL | 0x21,
318 OPC_SUBU = OPC_SPECIAL | 0x23,
319 OPC_AND = OPC_SPECIAL | 0x24,
320 OPC_OR = OPC_SPECIAL | 0x25,
321 OPC_XOR = OPC_SPECIAL | 0x26,
322 OPC_NOR = OPC_SPECIAL | 0x27,
323 OPC_SLT = OPC_SPECIAL | 0x2A,
324 OPC_SLTU = OPC_SPECIAL | 0x2B,
325
326 OPC_REGIMM = 0x01 << 26,
327 OPC_BLTZ = OPC_REGIMM | (0x00 << 16),
328 OPC_BGEZ = OPC_REGIMM | (0x01 << 16),
329
330 OPC_SPECIAL3 = 0x1f << 26,
331 OPC_INS = OPC_SPECIAL3 | 0x004,
332 OPC_WSBH = OPC_SPECIAL3 | 0x0a0,
333 OPC_SEB = OPC_SPECIAL3 | 0x420,
334 OPC_SEH = OPC_SPECIAL3 | 0x620,
335 };
336
337 /*
338 * Type reg
339 */
340 static inline void tcg_out_opc_reg(TCGContext *s, int opc,
341 TCGReg rd, TCGReg rs, TCGReg rt)
342 {
343 int32_t inst;
344
345 inst = opc;
346 inst |= (rs & 0x1F) << 21;
347 inst |= (rt & 0x1F) << 16;
348 inst |= (rd & 0x1F) << 11;
349 tcg_out32(s, inst);
350 }
351
352 /*
353 * Type immediate
354 */
355 static inline void tcg_out_opc_imm(TCGContext *s, int opc,
356 TCGReg rt, TCGReg rs, TCGArg imm)
357 {
358 int32_t inst;
359
360 inst = opc;
361 inst |= (rs & 0x1F) << 21;
362 inst |= (rt & 0x1F) << 16;
363 inst |= (imm & 0xffff);
364 tcg_out32(s, inst);
365 }
366
367 /*
368 * Type branch
369 */
370 static inline void tcg_out_opc_br(TCGContext *s, int opc,
371 TCGReg rt, TCGReg rs)
372 {
373 /* We take care here not to modify the branch target by reading
374 the existing value and using it again. This ensures that caches and
375 memory are kept coherent during retranslation. */
376 uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);
377
378 tcg_out_opc_imm(s, opc, rt, rs, offset);
379 }
380
381 /*
382 * Type sa
383 */
384 static inline void tcg_out_opc_sa(TCGContext *s, int opc,
385 TCGReg rd, TCGReg rt, TCGArg sa)
386 {
387 int32_t inst;
388
389 inst = opc;
390 inst |= (rt & 0x1F) << 16;
391 inst |= (rd & 0x1F) << 11;
392 inst |= (sa & 0x1F) << 6;
393 tcg_out32(s, inst);
394
395 }
396
397 static inline void tcg_out_nop(TCGContext *s)
398 {
399 tcg_out32(s, 0);
400 }
401
402 static inline void tcg_out_mov(TCGContext *s, TCGType type,
403 TCGReg ret, TCGReg arg)
404 {
405 /* Simple reg-reg move, optimising out the 'do nothing' case */
406 if (ret != arg) {
407 tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
408 }
409 }
410
411 static inline void tcg_out_movi(TCGContext *s, TCGType type,
412 TCGReg reg, tcg_target_long arg)
413 {
414 if (arg == (int16_t)arg) {
415 tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
416 } else if (arg == (uint16_t)arg) {
417 tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
418 } else {
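        /* General case: LUI sets the upper 16 bits (clearing the lower
           half), then ORI fills in the low 16 bits. */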
419 tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
420 tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
421 }
422 }
423
424 static inline void tcg_out_bswap16(TCGContext *s, TCGReg ret, TCGReg arg)
425 {
426 #ifdef _MIPS_ARCH_MIPS32R2
427 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
428 #else
429 /* ret and arg can't be the $at register */
430 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
431 tcg_abort();
432 }
433
434 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
435 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
436 tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
437 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
438 #endif
439 }
440
441 static inline void tcg_out_bswap16s(TCGContext *s, TCGReg ret, TCGReg arg)
442 {
443 #ifdef _MIPS_ARCH_MIPS32R2
444 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
445 tcg_out_opc_reg(s, OPC_SEH, ret, 0, ret);
446 #else
447 /* ret and arg can't be the $at register */
448 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
449 tcg_abort();
450 }
451
452 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
453 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
454 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
455 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
456 #endif
457 }
458
459 static inline void tcg_out_bswap32(TCGContext *s, TCGReg ret, TCGReg arg)
460 {
461 #ifdef _MIPS_ARCH_MIPS32R2
462 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
463 tcg_out_opc_sa(s, OPC_ROTR, ret, ret, 16);
464 #else
465 /* ret and arg must be different and can't be the $at register */
466 if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
467 tcg_abort();
468 }
469
470 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
471
472 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
473 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
474
475 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
476 tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
477 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
478
479 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
480 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
481 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
482 #endif
483 }
484
485 static inline void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
486 {
487 #ifdef _MIPS_ARCH_MIPS32R2
488 tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
489 #else
490 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
491 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
492 #endif
493 }
494
495 static inline void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
496 {
497 #ifdef _MIPS_ARCH_MIPS32R2
498 tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
499 #else
500 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
501 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
502 #endif
503 }
504
505 static inline void tcg_out_ldst(TCGContext *s, int opc, TCGArg arg,
506 TCGReg arg1, TCGArg arg2)
507 {
508 if (arg2 == (int16_t) arg2) {
509 tcg_out_opc_imm(s, opc, arg, arg1, arg2);
510 } else {
511 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
512 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
513 tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
514 }
515 }
516
517 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
518 TCGReg arg1, tcg_target_long arg2)
519 {
520 tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
521 }
522
523 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
524 TCGReg arg1, tcg_target_long arg2)
525 {
526 tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
527 }
528
529 static inline void tcg_out_addi(TCGContext *s, TCGReg reg, TCGArg val)
530 {
531 if (val == (int16_t)val) {
532 tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
533 } else {
534 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
535 tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
536 }
537 }
538
539 /* Helper routines for marshalling helper function arguments into
540 * the correct registers and stack.
541 * arg_num is where we want to put this argument, and is updated to be ready
542 for the next call. arg is the argument itself. Note that arg_num 0..3
543 correspond to real registers, 4 and above to stack slots.
544 *
545 * We provide routines for arguments which are: immediate, 32 bit
546 * value in register, 16 and 8 bit values in register (which must be zero
547 * extended before use) and 64 bit value in a lo:hi register pair.
548 */
549 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
550 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
551 { \
552 if (*arg_num < 4) { \
553 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
554 } else { \
555 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
556 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
557 } \
558 (*arg_num)++; \
559 }
560 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
561 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
562 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
563 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
564 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
565 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
566 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
567 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
568 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
569 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
570 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, TCGArg arg)
571 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
572
573 /* We don't use the macro for this one to avoid an unnecessary reg-reg
574 move when storing to the stack. */
575 static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
576 TCGReg arg)
577 {
578 if (*arg_num < 4) {
579 tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
580 } else {
581 tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
582 }
583 (*arg_num)++;
584 }
585
586 static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
587 TCGReg arg_low, TCGReg arg_high)
588 {
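    /* The o32 ABI requires 64-bit arguments to start in an even register
       (or an 8-byte aligned stack slot), so round arg_num up first. */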
589 (*arg_num) = (*arg_num + 1) & ~1;
590
591 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
592 tcg_out_call_iarg_reg32(s, arg_num, arg_high);
593 tcg_out_call_iarg_reg32(s, arg_num, arg_low);
594 #else
595 tcg_out_call_iarg_reg32(s, arg_num, arg_low);
596 tcg_out_call_iarg_reg32(s, arg_num, arg_high);
597 #endif
598 }
599
600 static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
601 TCGArg arg2, int label_index)
602 {
603 TCGLabel *l = &s->labels[label_index];
604
605 switch (cond) {
606 case TCG_COND_EQ:
607 tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
608 break;
609 case TCG_COND_NE:
610 tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
611 break;
612 case TCG_COND_LT:
613 if (arg2 == 0) {
614 tcg_out_opc_br(s, OPC_BLTZ, 0, arg1);
615 } else {
616 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
617 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
618 }
619 break;
620 case TCG_COND_LTU:
621 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
622 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
623 break;
624 case TCG_COND_GE:
625 if (arg2 == 0) {
626 tcg_out_opc_br(s, OPC_BGEZ, 0, arg1);
627 } else {
628 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
629 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
630 }
631 break;
632 case TCG_COND_GEU:
633 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
634 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
635 break;
636 case TCG_COND_LE:
637 if (arg2 == 0) {
638 tcg_out_opc_br(s, OPC_BLEZ, 0, arg1);
639 } else {
640 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
641 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
642 }
643 break;
644 case TCG_COND_LEU:
645 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
646 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
647 break;
648 case TCG_COND_GT:
649 if (arg2 == 0) {
650 tcg_out_opc_br(s, OPC_BGTZ, 0, arg1);
651 } else {
652 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
653 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
654 }
655 break;
656 case TCG_COND_GTU:
657 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
658 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
659 break;
660 default:
661 tcg_abort();
662 break;
663 }
664 if (l->has_value) {
665 reloc_pc16(s->code_ptr - 4, l->u.value);
666 } else {
667 tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
668 }
669 tcg_out_nop(s);
670 }
671
672 /* XXX: we implement it at the target level to avoid having to
673 handle cross-basic-block temporaries */
674 static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGArg arg1,
675 TCGArg arg2, TCGArg arg3, TCGArg arg4,
676 int label_index)
677 {
678 void *label_ptr;
679
680 switch(cond) {
681 case TCG_COND_NE:
682 tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
683 tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
684 return;
685 case TCG_COND_EQ:
686 break;
687 case TCG_COND_LT:
688 case TCG_COND_LE:
689 tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
690 break;
691 case TCG_COND_GT:
692 case TCG_COND_GE:
693 tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
694 break;
695 case TCG_COND_LTU:
696 case TCG_COND_LEU:
697 tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
698 break;
699 case TCG_COND_GTU:
700 case TCG_COND_GEU:
701 tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
702 break;
703 default:
704 tcg_abort();
705 }
706
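    /* If the high words differ, the outcome was already decided above (or
       the condition is false), so the BNE below skips the low-word
       comparison; when they are equal, the low words decide, compared with
       the unsigned variant of the condition. */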
707 label_ptr = s->code_ptr;
708 tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
709 tcg_out_nop(s);
710
711 switch(cond) {
712 case TCG_COND_EQ:
713 tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
714 break;
715 case TCG_COND_LT:
716 case TCG_COND_LTU:
717 tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
718 break;
719 case TCG_COND_LE:
720 case TCG_COND_LEU:
721 tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
722 break;
723 case TCG_COND_GT:
724 case TCG_COND_GTU:
725 tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
726 break;
727 case TCG_COND_GE:
728 case TCG_COND_GEU:
729 tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
730 break;
731 default:
732 tcg_abort();
733 }
734
735 reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
736 }
737
738 static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
739 TCGArg arg1, TCGArg arg2)
740 {
741 switch (cond) {
742 case TCG_COND_EQ:
743 if (arg1 == 0) {
744 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
745 } else if (arg2 == 0) {
746 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
747 } else {
748 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
749 tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
750 }
751 break;
752 case TCG_COND_NE:
753 if (arg1 == 0) {
754 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
755 } else if (arg2 == 0) {
756 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
757 } else {
758 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
759 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
760 }
761 break;
762 case TCG_COND_LT:
763 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
764 break;
765 case TCG_COND_LTU:
766 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
767 break;
768 case TCG_COND_GE:
769 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
770 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
771 break;
772 case TCG_COND_GEU:
773 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
774 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
775 break;
776 case TCG_COND_LE:
777 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
778 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
779 break;
780 case TCG_COND_LEU:
781 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
782 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
783 break;
784 case TCG_COND_GT:
785 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
786 break;
787 case TCG_COND_GTU:
788 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
789 break;
790 default:
791 tcg_abort();
792 break;
793 }
794 }
795
796 /* XXX: we implement it at the target level to avoid having to
797 handle cross-basic-block temporaries */
798 static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
799 TCGArg arg1, TCGArg arg2, TCGArg arg3, TCGArg arg4)
800 {
801 switch (cond) {
802 case TCG_COND_EQ:
803 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
804 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
805 tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
806 return;
807 case TCG_COND_NE:
808 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
809 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
810 tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
811 return;
812 case TCG_COND_LT:
813 case TCG_COND_LE:
814 tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
815 break;
816 case TCG_COND_GT:
817 case TCG_COND_GE:
818 tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
819 break;
820 case TCG_COND_LTU:
821 case TCG_COND_LEU:
822 tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
823 break;
824 case TCG_COND_GTU:
825 case TCG_COND_GEU:
826 tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
827 break;
828 default:
829 tcg_abort();
830 break;
831 }
832
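    /* AT holds the strict comparison of the high words. T0 is set below to
       "high words equal", and the final result is
       (low-word condition AND high words equal) OR AT. */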
833 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);
834
835 switch(cond) {
836 case TCG_COND_LT:
837 case TCG_COND_LTU:
838 tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
839 break;
840 case TCG_COND_LE:
841 case TCG_COND_LEU:
842 tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
843 break;
844 case TCG_COND_GT:
845 case TCG_COND_GTU:
846 tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
847 break;
848 case TCG_COND_GE:
849 case TCG_COND_GEU:
850 tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
851 break;
852 default:
853 tcg_abort();
854 }
855
856 tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
857 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
858 }
859
860 #if defined(CONFIG_SOFTMMU)
861
862 #include "../../softmmu_defs.h"
863
864 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
865 int mmu_idx) */
866 static const void * const qemu_ld_helpers[4] = {
867 helper_ldb_mmu,
868 helper_ldw_mmu,
869 helper_ldl_mmu,
870 helper_ldq_mmu,
871 };
872
873 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
874 uintxx_t val, int mmu_idx) */
875 static const void * const qemu_st_helpers[4] = {
876 helper_stb_mmu,
877 helper_stw_mmu,
878 helper_stl_mmu,
879 helper_stq_mmu,
880 };
881 #endif
882
883 static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
884 int opc)
885 {
886 TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
887 #if defined(CONFIG_SOFTMMU)
888 void *label1_ptr, *label2_ptr;
889 int arg_num;
890 int mem_index, s_bits;
891 int addr_meml;
892 # if TARGET_LONG_BITS == 64
893 uint8_t *label3_ptr;
894 TCGReg addr_regh;
895 int addr_memh;
896 # endif
897 #endif
898 data_regl = *args++;
899 if (opc == 3)
900 data_regh = *args++;
901 else
902 data_regh = 0;
903 addr_regl = *args++;
904 #if defined(CONFIG_SOFTMMU)
905 # if TARGET_LONG_BITS == 64
906 addr_regh = *args++;
907 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
908 addr_memh = 0;
909 addr_meml = 4;
910 # else
911 addr_memh = 4;
912 addr_meml = 0;
913 # endif
914 # else
915 addr_meml = 0;
916 # endif
917 mem_index = *args;
918 s_bits = opc & 3;
919 #endif
920
921 if (opc == 3) {
922 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
923 data_reg1 = data_regh;
924 data_reg2 = data_regl;
925 #else
926 data_reg1 = data_regl;
927 data_reg2 = data_regh;
928 #endif
929 } else {
930 data_reg1 = data_regl;
931 data_reg2 = 0;
932 }
933 #if defined(CONFIG_SOFTMMU)
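    /* Fast path: compute the TLB index from the address, load the tag of
       the addr_read entry, and mask the guest address down to page bits
       (keeping the low alignment bits) for the comparison below. */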
934 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
935 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
936 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
937 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
938 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
939 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
940 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
941
942 # if TARGET_LONG_BITS == 64
943 label3_ptr = s->code_ptr;
944 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
945 tcg_out_nop(s);
946
947 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
948 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);
949
950 label1_ptr = s->code_ptr;
951 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
952 tcg_out_nop(s);
953
954 reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
955 # else
956 label1_ptr = s->code_ptr;
957 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
958 tcg_out_nop(s);
959 # endif
960
961 /* slow path */
962 arg_num = 0;
963 tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
964 # if TARGET_LONG_BITS == 64
965 tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
966 # else
967 tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
968 # endif
969 tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
970 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
971 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
972 tcg_out_nop(s);
973
974 switch(opc) {
975 case 0:
976 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
977 break;
978 case 0 | 4:
979 tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
980 break;
981 case 1:
982 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
983 break;
984 case 1 | 4:
985 tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
986 break;
987 case 2:
988 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
989 break;
990 case 3:
991 tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
992 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
993 break;
994 default:
995 tcg_abort();
996 }
997
998 label2_ptr = s->code_ptr;
999 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
1000 tcg_out_nop(s);
1001
1002 /* label1: fast path */
1003 reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);
1004
1005 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
1006 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
1007 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
1008 #else
1009 if (GUEST_BASE == (int16_t)GUEST_BASE) {
1010 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
1011 } else {
1012 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
1013 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
1014 }
1015 #endif
1016
1017 switch(opc) {
1018 case 0:
1019 tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
1020 break;
1021 case 0 | 4:
1022 tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
1023 break;
1024 case 1:
1025 if (TCG_NEED_BSWAP) {
1026 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
1027 tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
1028 } else {
1029 tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
1030 }
1031 break;
1032 case 1 | 4:
1033 if (TCG_NEED_BSWAP) {
1034 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
1035 tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
1036 } else {
1037 tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
1038 }
1039 break;
1040 case 2:
1041 if (TCG_NEED_BSWAP) {
1042 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
1043 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
1044 } else {
1045 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
1046 }
1047 break;
1048 case 3:
1049 if (TCG_NEED_BSWAP) {
1050 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
1051 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
1052 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
1053 tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
1054 } else {
1055 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
1056 tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
1057 }
1058 break;
1059 default:
1060 tcg_abort();
1061 }
1062
1063 #if defined(CONFIG_SOFTMMU)
1064 reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
1065 #endif
1066 }
1067
1068 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
1069 int opc)
1070 {
1071 TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
1072 #if defined(CONFIG_SOFTMMU)
1073 uint8_t *label1_ptr, *label2_ptr;
1074 int arg_num;
1075 int mem_index, s_bits;
1076 int addr_meml;
1077 #endif
1078 #if TARGET_LONG_BITS == 64
1079 # if defined(CONFIG_SOFTMMU)
1080 uint8_t *label3_ptr;
1081 TCGReg addr_regh;
1082 int addr_memh;
1083 # endif
1084 #endif
1085 data_regl = *args++;
1086 if (opc == 3) {
1087 data_regh = *args++;
1088 } else {
1089 data_regh = 0;
1090 }
1091 addr_regl = *args++;
1092 #if defined(CONFIG_SOFTMMU)
1093 # if TARGET_LONG_BITS == 64
1094 addr_regh = *args++;
1095 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
1096 addr_memh = 0;
1097 addr_meml = 4;
1098 # else
1099 addr_memh = 4;
1100 addr_meml = 0;
1101 # endif
1102 # else
1103 addr_meml = 0;
1104 # endif
1105 mem_index = *args;
1106 s_bits = opc;
1107 #endif
1108
1109 if (opc == 3) {
1110 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1111 data_reg1 = data_regh;
1112 data_reg2 = data_regl;
1113 #else
1114 data_reg1 = data_regl;
1115 data_reg2 = data_regh;
1116 #endif
1117 } else {
1118 data_reg1 = data_regl;
1119 data_reg2 = 0;
1120 }
1121
1122 #if defined(CONFIG_SOFTMMU)
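    /* Fast path: same TLB lookup as for loads, but against addr_write. */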
1123 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
1124 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
1125 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
1126 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1127 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
1128 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
1129 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
1130
1131 # if TARGET_LONG_BITS == 64
1132 label3_ptr = s->code_ptr;
1133 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
1134 tcg_out_nop(s);
1135
1136 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1137 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);
1138
1139 label1_ptr = s->code_ptr;
1140 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
1141 tcg_out_nop(s);
1142
1143 reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
1144 # else
1145 label1_ptr = s->code_ptr;
1146 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
1147 tcg_out_nop(s);
1148 # endif
1149
1150 /* slow path */
1151 arg_num = 0;
1152 tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
1153 # if TARGET_LONG_BITS == 64
1154 tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
1155 # else
1156 tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
1157 # endif
1158 switch(opc) {
1159 case 0:
1160 tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
1161 break;
1162 case 1:
1163 tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
1164 break;
1165 case 2:
1166 tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
1167 break;
1168 case 3:
1169 tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
1170 break;
1171 default:
1172 tcg_abort();
1173 }
1174 tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
1175 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
1176 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
1177 tcg_out_nop(s);
1178
1179 label2_ptr = s->code_ptr;
1180 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
1181 tcg_out_nop(s);
1182
1183 /* label1: fast path */
1184 reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);
1185
1186 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
1187 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
1188 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
1189 #else
1190 if (GUEST_BASE == (int16_t)GUEST_BASE) {
1191 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
1192 } else {
1193 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
1194 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
1195 }
1196
1197 #endif
1198
1199 switch(opc) {
1200 case 0:
1201 tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
1202 break;
1203 case 1:
1204 if (TCG_NEED_BSWAP) {
1205 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_T0, data_reg1, 0xffff);
1206 tcg_out_bswap16(s, TCG_REG_T0, TCG_REG_T0);
1207 tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
1208 } else {
1209 tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
1210 }
1211 break;
1212 case 2:
1213 if (TCG_NEED_BSWAP) {
1214 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1215 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1216 } else {
1217 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
1218 }
1219 break;
1220 case 3:
1221 if (TCG_NEED_BSWAP) {
1222 tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
1223 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1224 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1225 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
1226 } else {
1227 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
1228 tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
1229 }
1230 break;
1231 default:
1232 tcg_abort();
1233 }
1234
1235 #if defined(CONFIG_SOFTMMU)
1236 reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
1237 #endif
1238 }
1239
1240 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1241 const TCGArg *args, const int *const_args)
1242 {
1243 switch(opc) {
1244 case INDEX_op_exit_tb:
1245 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
1246 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
1247 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1248 tcg_out_nop(s);
1249 break;
1250 case INDEX_op_goto_tb:
1251 if (s->tb_jmp_offset) {
1252 /* direct jump method */
1253 tcg_abort();
1254 } else {
1255 /* indirect jump method */
1256 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, (tcg_target_long)(s->tb_next + args[0]));
1257 tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
1258 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1259 }
1260 tcg_out_nop(s);
1261 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
1262 break;
1263 case INDEX_op_call:
1264 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);
1265 tcg_out_nop(s);
1266 break;
1267 case INDEX_op_jmp:
1268 tcg_out_opc_reg(s, OPC_JR, 0, args[0], 0);
1269 tcg_out_nop(s);
1270 break;
1271 case INDEX_op_br:
1272 tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
1273 break;
1274
1275 case INDEX_op_mov_i32:
1276 tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
1277 break;
1278 case INDEX_op_movi_i32:
1279 tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
1280 break;
1281
1282 case INDEX_op_ld8u_i32:
1283 tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
1284 break;
1285 case INDEX_op_ld8s_i32:
1286 tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
1287 break;
1288 case INDEX_op_ld16u_i32:
1289 tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
1290 break;
1291 case INDEX_op_ld16s_i32:
1292 tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
1293 break;
1294 case INDEX_op_ld_i32:
1295 tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
1296 break;
1297 case INDEX_op_st8_i32:
1298 tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
1299 break;
1300 case INDEX_op_st16_i32:
1301 tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
1302 break;
1303 case INDEX_op_st_i32:
1304 tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
1305 break;
1306
1307 case INDEX_op_add_i32:
1308 if (const_args[2]) {
1309 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
1310 } else {
1311 tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
1312 }
1313 break;
1314 case INDEX_op_add2_i32:
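        /* Add the low words into AT, derive the carry with SLTU (the sum is
           below an addend iff it wrapped), add the high words plus the
           carry, and only then copy AT to the low destination so that the
           outputs may overlap the inputs. */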
1315 if (const_args[4]) {
1316 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
1317 } else {
1318 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
1319 }
1320 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
1321 if (const_args[5]) {
1322 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
1323 } else {
1324 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
1325 }
1326 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
1327 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
1328 break;
1329 case INDEX_op_sub_i32:
1330 if (const_args[2]) {
1331 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
1332 } else {
1333 tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
1334 }
1335 break;
1336 case INDEX_op_sub2_i32:
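        /* Same scheme as add2: the borrow is detected with SLTU comparing
           the minuend against the low-word result. */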
1337 if (const_args[4]) {
1338 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
1339 } else {
1340 tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
1341 }
1342 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
1343 if (const_args[5]) {
1344 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
1345 } else {
1346 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
1347 }
1348 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
1349 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
1350 break;
1351 case INDEX_op_mul_i32:
1352 tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
1353 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1354 break;
1355 case INDEX_op_mulu2_i32:
1356 tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
1357 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1358 tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
1359 break;
1360 case INDEX_op_div_i32:
1361 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1362 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1363 break;
1364 case INDEX_op_divu_i32:
1365 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1366 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1367 break;
1368 case INDEX_op_rem_i32:
1369 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1370 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1371 break;
1372 case INDEX_op_remu_i32:
1373 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1374 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1375 break;
1376
1377 case INDEX_op_and_i32:
1378 if (const_args[2]) {
1379 tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
1380 } else {
1381 tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
1382 }
1383 break;
1384 case INDEX_op_or_i32:
1385 if (const_args[2]) {
1386 tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
1387 } else {
1388 tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
1389 }
1390 break;
1391 case INDEX_op_nor_i32:
1392 tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
1393 break;
1394 case INDEX_op_not_i32:
1395 tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
1396 break;
1397 case INDEX_op_xor_i32:
1398 if (const_args[2]) {
1399 tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
1400 } else {
1401 tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
1402 }
1403 break;
1404
1405 case INDEX_op_sar_i32:
1406 if (const_args[2]) {
1407 tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
1408 } else {
1409 tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
1410 }
1411 break;
1412 case INDEX_op_shl_i32:
1413 if (const_args[2]) {
1414 tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
1415 } else {
1416 tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
1417 }
1418 break;
1419 case INDEX_op_shr_i32:
1420 if (const_args[2]) {
1421 tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
1422 } else {
1423 tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
1424 }
1425 break;
1426 case INDEX_op_rotl_i32:
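        /* MIPS32R2 only has rotate-right, so a rotate left by N is emitted
           as a rotate right by (32 - N). */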
1427 if (const_args[2]) {
1428 tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], 0x20 - args[2]);
1429 } else {
1430 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, 32);
1431 tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, TCG_REG_AT, args[2]);
1432 tcg_out_opc_reg(s, OPC_ROTRV, args[0], TCG_REG_AT, args[1]);
1433 }
1434 break;
1435 case INDEX_op_rotr_i32:
1436 if (const_args[2]) {
1437 tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], args[2]);
1438 } else {
1439 tcg_out_opc_reg(s, OPC_ROTRV, args[0], args[2], args[1]);
1440 }
1441 break;
1442
1443 /* The bswap routines do not work on non-R2 CPUs. In that case
1444 we let TCG generate the corresponding code. */
1445 case INDEX_op_bswap16_i32:
1446 tcg_out_bswap16(s, args[0], args[1]);
1447 break;
1448 case INDEX_op_bswap32_i32:
1449 tcg_out_bswap32(s, args[0], args[1]);
1450 break;
1451
1452 case INDEX_op_ext8s_i32:
1453 tcg_out_ext8s(s, args[0], args[1]);
1454 break;
1455 case INDEX_op_ext16s_i32:
1456 tcg_out_ext16s(s, args[0], args[1]);
1457 break;
1458
1459 case INDEX_op_deposit_i32:
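        /* MIPS32R2 INS rt, rs, pos, size: the msb (pos + size - 1) lives in
           the rd field (bits 15..11) and the lsb (pos) in the sa field
           (bits 10..6), hence the packing into the immediate below. */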
1460 tcg_out_opc_imm(s, OPC_INS, args[0], args[2],
1461 ((args[3] + args[4] - 1) << 11) | (args[3] << 6));
1462 break;
1463
1464 case INDEX_op_brcond_i32:
1465 tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
1466 break;
1467 case INDEX_op_brcond2_i32:
1468 tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
1469 break;
1470
1471 case INDEX_op_setcond_i32:
1472 tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
1473 break;
1474 case INDEX_op_setcond2_i32:
1475 tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
1476 break;
1477
1478 case INDEX_op_qemu_ld8u:
1479 tcg_out_qemu_ld(s, args, 0);
1480 break;
1481 case INDEX_op_qemu_ld8s:
1482 tcg_out_qemu_ld(s, args, 0 | 4);
1483 break;
1484 case INDEX_op_qemu_ld16u:
1485 tcg_out_qemu_ld(s, args, 1);
1486 break;
1487 case INDEX_op_qemu_ld16s:
1488 tcg_out_qemu_ld(s, args, 1 | 4);
1489 break;
1490 case INDEX_op_qemu_ld32:
1491 tcg_out_qemu_ld(s, args, 2);
1492 break;
1493 case INDEX_op_qemu_ld64:
1494 tcg_out_qemu_ld(s, args, 3);
1495 break;
1496 case INDEX_op_qemu_st8:
1497 tcg_out_qemu_st(s, args, 0);
1498 break;
1499 case INDEX_op_qemu_st16:
1500 tcg_out_qemu_st(s, args, 1);
1501 break;
1502 case INDEX_op_qemu_st32:
1503 tcg_out_qemu_st(s, args, 2);
1504 break;
1505 case INDEX_op_qemu_st64:
1506 tcg_out_qemu_st(s, args, 3);
1507 break;
1508
1509 default:
1510 tcg_abort();
1511 }
1512 }
1513
1514 static const TCGTargetOpDef mips_op_defs[] = {
1515 { INDEX_op_exit_tb, { } },
1516 { INDEX_op_goto_tb, { } },
1517 { INDEX_op_call, { "C" } },
1518 { INDEX_op_jmp, { "r" } },
1519 { INDEX_op_br, { } },
1520
1521 { INDEX_op_mov_i32, { "r", "r" } },
1522 { INDEX_op_movi_i32, { "r" } },
1523 { INDEX_op_ld8u_i32, { "r", "r" } },
1524 { INDEX_op_ld8s_i32, { "r", "r" } },
1525 { INDEX_op_ld16u_i32, { "r", "r" } },
1526 { INDEX_op_ld16s_i32, { "r", "r" } },
1527 { INDEX_op_ld_i32, { "r", "r" } },
1528 { INDEX_op_st8_i32, { "rZ", "r" } },
1529 { INDEX_op_st16_i32, { "rZ", "r" } },
1530 { INDEX_op_st_i32, { "rZ", "r" } },
1531
1532 { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
1533 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
1534 { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
1535 { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
1536 { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
1537 { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
1538 { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
1539 { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },
1540
1541 { INDEX_op_and_i32, { "r", "rZ", "rI" } },
1542 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
1543 { INDEX_op_not_i32, { "r", "rZ" } },
1544 { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
1545 { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },
1546
1547 { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
1548 { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
1549 { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
1550 { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
1551 { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
1552
1553 { INDEX_op_bswap16_i32, { "r", "r" } },
1554 { INDEX_op_bswap32_i32, { "r", "r" } },
1555
1556 { INDEX_op_ext8s_i32, { "r", "rZ" } },
1557 { INDEX_op_ext16s_i32, { "r", "rZ" } },
1558
1559 { INDEX_op_deposit_i32, { "r", "0", "rZ" } },
1560
1561 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
1562 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
1563 { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },
1564
1565 { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1566 { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1567 { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },
1568
1569 #if TARGET_LONG_BITS == 32
1570 { INDEX_op_qemu_ld8u, { "L", "lZ" } },
1571 { INDEX_op_qemu_ld8s, { "L", "lZ" } },
1572 { INDEX_op_qemu_ld16u, { "L", "lZ" } },
1573 { INDEX_op_qemu_ld16s, { "L", "lZ" } },
1574 { INDEX_op_qemu_ld32, { "L", "lZ" } },
1575 { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },
1576
1577 { INDEX_op_qemu_st8, { "SZ", "SZ" } },
1578 { INDEX_op_qemu_st16, { "SZ", "SZ" } },
1579 { INDEX_op_qemu_st32, { "SZ", "SZ" } },
1580 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
1581 #else
1582 { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
1583 { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
1584 { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
1585 { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
1586 { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
1587 { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },
1588
1589 { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
1590 { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
1591 { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
1592 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
1593 #endif
1594 { -1 },
1595 };
1596
1597 static int tcg_target_callee_save_regs[] = {
1598 TCG_REG_S0, /* used for the global env (TCG_AREG0) */
1599 TCG_REG_S1,
1600 TCG_REG_S2,
1601 TCG_REG_S3,
1602 TCG_REG_S4,
1603 TCG_REG_S5,
1604 TCG_REG_S6,
1605 TCG_REG_S7,
1606 TCG_REG_FP,
1607 TCG_REG_RA, /* should be last for ABI compliance */
1608 };
1609
1610 /* Generate global QEMU prologue and epilogue code */
1611 static void tcg_target_qemu_prologue(TCGContext *s)
1612 {
1613 int i, frame_size;
1614
1615 /* reserve some stack space, also for TCG temps. */
1616 frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1617 + TCG_STATIC_CALL_ARGS_SIZE
1618 + CPU_TEMP_BUF_NLONGS * sizeof(long);
1619 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
1620 ~(TCG_TARGET_STACK_ALIGN - 1);
1621 tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1622 + TCG_STATIC_CALL_ARGS_SIZE,
1623 CPU_TEMP_BUF_NLONGS * sizeof(long));
1624
1625 /* TB prologue */
1626 tcg_out_addi(s, TCG_REG_SP, -frame_size);
1627 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1628 tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1629 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
1630 }
1631
1632 /* Call generated code */
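    /* Jump to the TB (passed in the second argument register); the move of
       env (the first argument) into TCG_AREG0 executes in the jump's delay
       slot. */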
1633 tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
1634 tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
1635 tb_ret_addr = s->code_ptr;
1636
1637 /* TB epilogue */
1638 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1639 tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1640 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
1641 }
1642
1643 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
1644 tcg_out_addi(s, TCG_REG_SP, frame_size);
1645 }
1646
1647 static void tcg_target_init(TCGContext *s)
1648 {
1649 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
1650 tcg_regset_set(tcg_target_call_clobber_regs,
1651 (1 << TCG_REG_V0) |
1652 (1 << TCG_REG_V1) |
1653 (1 << TCG_REG_A0) |
1654 (1 << TCG_REG_A1) |
1655 (1 << TCG_REG_A2) |
1656 (1 << TCG_REG_A3) |
1657 (1 << TCG_REG_T1) |
1658 (1 << TCG_REG_T2) |
1659 (1 << TCG_REG_T3) |
1660 (1 << TCG_REG_T4) |
1661 (1 << TCG_REG_T5) |
1662 (1 << TCG_REG_T6) |
1663 (1 << TCG_REG_T7) |
1664 (1 << TCG_REG_T8) |
1665 (1 << TCG_REG_T9));
1666
1667 tcg_regset_clear(s->reserved_regs);
1668 tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
1669 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0); /* kernel use only */
1670 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1); /* kernel use only */
1671 tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT); /* internal use */
1672 tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0); /* internal use */
1673 tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA); /* return address */
1674 tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP); /* stack pointer */
1675 tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP); /* global pointer */
1676
1677 tcg_add_target_add_op_defs(mips_op_defs);
1678 }