2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
9 * This library is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Lesser General Public
11 * License as published by the Free Software Foundation; either
12 * version 2 of the License, or (at your option) any later version.
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Lesser General Public License for more details.
19 * You should have received a copy of the GNU Lesser General Public
20 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
31 #define MIPS_DEBUG_DISAS 0
32 //#define MIPS_DEBUG_SIGN_EXTENSIONS
/* MIPS major opcodes: bits 31..26 of the instruction word. */
#define MASK_OP_MAJOR(op)   ((op) & (0x3F << 26))
38 /* indirect opcode tables */
39 OPC_SPECIAL
= (0x00 << 26),
40 OPC_REGIMM
= (0x01 << 26),
41 OPC_CP0
= (0x10 << 26),
42 OPC_CP1
= (0x11 << 26),
43 OPC_CP2
= (0x12 << 26),
44 OPC_CP3
= (0x13 << 26),
45 OPC_SPECIAL2
= (0x1C << 26),
46 OPC_SPECIAL3
= (0x1F << 26),
47 /* arithmetic with immediate */
48 OPC_ADDI
= (0x08 << 26),
49 OPC_ADDIU
= (0x09 << 26),
50 OPC_SLTI
= (0x0A << 26),
51 OPC_SLTIU
= (0x0B << 26),
52 /* logic with immediate */
53 OPC_ANDI
= (0x0C << 26),
54 OPC_ORI
= (0x0D << 26),
55 OPC_XORI
= (0x0E << 26),
56 OPC_LUI
= (0x0F << 26),
57 /* arithmetic with immediate */
58 OPC_DADDI
= (0x18 << 26),
59 OPC_DADDIU
= (0x19 << 26),
60 /* Jump and branches */
62 OPC_JAL
= (0x03 << 26),
63 OPC_JALS
= OPC_JAL
| 0x5,
64 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
65 OPC_BEQL
= (0x14 << 26),
66 OPC_BNE
= (0x05 << 26),
67 OPC_BNEL
= (0x15 << 26),
68 OPC_BLEZ
= (0x06 << 26),
69 OPC_BLEZL
= (0x16 << 26),
70 OPC_BGTZ
= (0x07 << 26),
71 OPC_BGTZL
= (0x17 << 26),
72 OPC_JALX
= (0x1D << 26), /* MIPS 16 only */
73 OPC_JALXS
= OPC_JALX
| 0x5,
75 OPC_LDL
= (0x1A << 26),
76 OPC_LDR
= (0x1B << 26),
77 OPC_LB
= (0x20 << 26),
78 OPC_LH
= (0x21 << 26),
79 OPC_LWL
= (0x22 << 26),
80 OPC_LW
= (0x23 << 26),
81 OPC_LWPC
= OPC_LW
| 0x5,
82 OPC_LBU
= (0x24 << 26),
83 OPC_LHU
= (0x25 << 26),
84 OPC_LWR
= (0x26 << 26),
85 OPC_LWU
= (0x27 << 26),
86 OPC_SB
= (0x28 << 26),
87 OPC_SH
= (0x29 << 26),
88 OPC_SWL
= (0x2A << 26),
89 OPC_SW
= (0x2B << 26),
90 OPC_SDL
= (0x2C << 26),
91 OPC_SDR
= (0x2D << 26),
92 OPC_SWR
= (0x2E << 26),
93 OPC_LL
= (0x30 << 26),
94 OPC_LLD
= (0x34 << 26),
95 OPC_LD
= (0x37 << 26),
96 OPC_LDPC
= OPC_LD
| 0x5,
97 OPC_SC
= (0x38 << 26),
98 OPC_SCD
= (0x3C << 26),
99 OPC_SD
= (0x3F << 26),
100 /* Floating point load/store */
101 OPC_LWC1
= (0x31 << 26),
102 OPC_LWC2
= (0x32 << 26),
103 OPC_LDC1
= (0x35 << 26),
104 OPC_LDC2
= (0x36 << 26),
105 OPC_SWC1
= (0x39 << 26),
106 OPC_SWC2
= (0x3A << 26),
107 OPC_SDC1
= (0x3D << 26),
108 OPC_SDC2
= (0x3E << 26),
109 /* MDMX ASE specific */
110 OPC_MDMX
= (0x1E << 26),
111 /* Cache and prefetch */
112 OPC_CACHE
= (0x2F << 26),
113 OPC_PREF
= (0x33 << 26),
114 /* Reserved major opcode */
115 OPC_MAJOR3B_RESERVED
= (0x3B << 26),
/* MIPS special opcodes: major opcode plus the function field (bits 5..0). */
#define MASK_SPECIAL(op)    (MASK_OP_MAJOR(op) | ((op) & 0x3F))
123 OPC_SLL
= 0x00 | OPC_SPECIAL
,
124 /* NOP is SLL r0, r0, 0 */
125 /* SSNOP is SLL r0, r0, 1 */
126 /* EHB is SLL r0, r0, 3 */
127 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
128 OPC_ROTR
= OPC_SRL
| (1 << 21),
129 OPC_SRA
= 0x03 | OPC_SPECIAL
,
130 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
131 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
132 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
133 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
134 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
135 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
136 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
137 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
138 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
139 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
140 OPC_DROTR
= OPC_DSRL
| (1 << 21),
141 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
142 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
143 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
144 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
145 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
146 /* Multiplication / division */
147 OPC_MULT
= 0x18 | OPC_SPECIAL
,
148 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
149 OPC_DIV
= 0x1A | OPC_SPECIAL
,
150 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
151 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
152 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
153 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
154 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
155 /* 2 registers arithmetic / logic */
156 OPC_ADD
= 0x20 | OPC_SPECIAL
,
157 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
158 OPC_SUB
= 0x22 | OPC_SPECIAL
,
159 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
160 OPC_AND
= 0x24 | OPC_SPECIAL
,
161 OPC_OR
= 0x25 | OPC_SPECIAL
,
162 OPC_XOR
= 0x26 | OPC_SPECIAL
,
163 OPC_NOR
= 0x27 | OPC_SPECIAL
,
164 OPC_SLT
= 0x2A | OPC_SPECIAL
,
165 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
166 OPC_DADD
= 0x2C | OPC_SPECIAL
,
167 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
168 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
169 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
171 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
172 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
173 OPC_JALRC
= OPC_JALR
| (0x5 << 6),
174 OPC_JALRS
= 0x10 | OPC_SPECIAL
| (0x5 << 6),
176 OPC_TGE
= 0x30 | OPC_SPECIAL
,
177 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
178 OPC_TLT
= 0x32 | OPC_SPECIAL
,
179 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
180 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
181 OPC_TNE
= 0x36 | OPC_SPECIAL
,
182 /* HI / LO registers load & stores */
183 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
184 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
185 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
186 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
187 /* Conditional moves */
188 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
189 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
191 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
194 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
195 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
196 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
197 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
198 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
200 OPC_SPECIAL15_RESERVED
= 0x15 | OPC_SPECIAL
,
201 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
202 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
203 OPC_SPECIAL35_RESERVED
= 0x35 | OPC_SPECIAL
,
204 OPC_SPECIAL37_RESERVED
= 0x37 | OPC_SPECIAL
,
205 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
206 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
/* Multiplication variants of the vr54xx: SPECIAL function plus sa field. */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
213 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
214 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
215 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
216 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
217 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
218 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
219 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
220 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
221 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
222 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
223 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
224 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
225 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
226 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
/* REGIMM (rt field) opcodes: major opcode plus rt (bits 20..16). */
#define MASK_REGIMM(op)     (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
233 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
234 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
235 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
236 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
237 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
238 OPC_BLTZALS
= OPC_BLTZAL
| 0x5, /* microMIPS */
239 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
240 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
241 OPC_BGEZALS
= OPC_BGEZAL
| 0x5, /* microMIPS */
242 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
243 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
244 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
245 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
246 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
247 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
248 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
249 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
/* Special2 opcodes: major opcode plus the function field (bits 5..0). */
#define MASK_SPECIAL2(op)   (MASK_OP_MAJOR(op) | ((op) & 0x3F))
256 /* Multiply & xxx operations */
257 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
258 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
259 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
260 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
261 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
263 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
264 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
265 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
266 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
267 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
268 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
269 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
270 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
271 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
272 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
273 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
274 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
276 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
277 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
278 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
279 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
281 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
/* Special3 opcodes: major opcode plus the function field (bits 5..0). */
#define MASK_SPECIAL3(op)   (MASK_OP_MAJOR(op) | ((op) & 0x3F))
288 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
289 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
290 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
291 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
292 OPC_INS
= 0x04 | OPC_SPECIAL3
,
293 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
294 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
295 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
296 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
297 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
298 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
299 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
300 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
303 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
304 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
305 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
306 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
307 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
308 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
309 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
310 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
311 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
312 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
313 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
314 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
318 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
321 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
322 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
323 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
327 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
330 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
331 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
/* Coprocessor 0 (rs field): major opcode plus rs (bits 25..21). */
#define MASK_CP0(op)        (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)))
338 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
339 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
340 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
341 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
342 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
343 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
344 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
345 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
346 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
347 OPC_C0
= (0x10 << 21) | OPC_CP0
,
348 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
349 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
353 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
356 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
357 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
358 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
359 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
360 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
361 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
/* Coprocessor 0 (with rs == C0): CP0 selector plus the function field. */
#define MASK_C0(op)         (MASK_CP0(op) | ((op) & 0x3F))
368 OPC_TLBR
= 0x01 | OPC_C0
,
369 OPC_TLBWI
= 0x02 | OPC_C0
,
370 OPC_TLBWR
= 0x06 | OPC_C0
,
371 OPC_TLBP
= 0x08 | OPC_C0
,
372 OPC_RFE
= 0x10 | OPC_C0
,
373 OPC_ERET
= 0x18 | OPC_C0
,
374 OPC_DERET
= 0x1F | OPC_C0
,
375 OPC_WAIT
= 0x20 | OPC_C0
,
/* Coprocessor 1 (rs field): major opcode plus rs (bits 25..21). */
#define MASK_CP1(op)        (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)))
381 /* Values for the fmt field in FP instructions */
383 /* 0 - 15 are reserved */
384 FMT_S
= 16, /* single fp */
385 FMT_D
= 17, /* double fp */
386 FMT_E
= 18, /* extended fp */
387 FMT_Q
= 19, /* quad fp */
388 FMT_W
= 20, /* 32-bit fixed */
389 FMT_L
= 21, /* 64-bit fixed */
390 FMT_PS
= 22, /* paired single fp */
391 /* 23 - 31 are reserved */
395 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
396 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
397 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
398 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
399 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
400 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
401 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
402 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
403 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
404 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
405 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
406 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
407 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
408 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
409 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
410 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
411 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
412 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
/* CP1 function / branch-condition sub-opcode masks. */
#define MASK_CP1_FUNC(op)   (MASK_CP1(op) | ((op) & 0x3F))
#define MASK_BC1(op)        (MASK_CP1(op) | ((op) & (0x3 << 16)))
419 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
420 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
421 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
422 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
426 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
427 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
431 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
432 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
435 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
438 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
439 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
440 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
441 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
442 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
443 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
444 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
445 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
446 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
449 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
452 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
453 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
454 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
455 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
456 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
457 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
458 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
459 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
461 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
462 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
463 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
464 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
465 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
466 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
467 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
468 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
470 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
471 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
472 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
473 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
474 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
475 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
476 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
477 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
479 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
480 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
481 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
482 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
483 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
484 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
485 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
486 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
488 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
489 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
490 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
491 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
492 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
493 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
495 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
496 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
497 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
498 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
499 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
500 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
502 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
503 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
504 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
505 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
506 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
507 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
509 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
510 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
511 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
512 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
513 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
514 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
516 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
517 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
518 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
519 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
520 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
521 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
523 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
524 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
525 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
526 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
527 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
528 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
530 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
531 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
532 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
533 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
534 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
535 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
537 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
538 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
539 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
540 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
541 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
542 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
546 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
549 OPC_LWXC1
= 0x00 | OPC_CP3
,
550 OPC_LDXC1
= 0x01 | OPC_CP3
,
551 OPC_LUXC1
= 0x05 | OPC_CP3
,
552 OPC_SWXC1
= 0x08 | OPC_CP3
,
553 OPC_SDXC1
= 0x09 | OPC_CP3
,
554 OPC_SUXC1
= 0x0D | OPC_CP3
,
555 OPC_PREFX
= 0x0F | OPC_CP3
,
556 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
557 OPC_MADD_S
= 0x20 | OPC_CP3
,
558 OPC_MADD_D
= 0x21 | OPC_CP3
,
559 OPC_MADD_PS
= 0x26 | OPC_CP3
,
560 OPC_MSUB_S
= 0x28 | OPC_CP3
,
561 OPC_MSUB_D
= 0x29 | OPC_CP3
,
562 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
563 OPC_NMADD_S
= 0x30 | OPC_CP3
,
564 OPC_NMADD_D
= 0x31 | OPC_CP3
,
565 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
566 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
567 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
568 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
571 /* global register indices */
572 static TCGv_ptr cpu_env
;
573 static TCGv cpu_gpr
[32], cpu_PC
;
574 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
], cpu_ACX
[MIPS_DSP_ACC
];
575 static TCGv cpu_dspctrl
, btarget
, bcond
;
576 static TCGv_i32 hflags
;
577 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
578 static TCGv_i64 fpu_f64
[32];
580 static uint32_t gen_opc_hflags
[OPC_BUF_SIZE
];
582 #include "gen-icount.h"
/* Wrappers that pass trailing integer arguments to helpers as TCGv_i32
   constants, freeing the temporary afterwards.  Naming: <results>e<ints>i. */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
626 typedef struct DisasContext
{
627 struct TranslationBlock
*tb
;
628 target_ulong pc
, saved_pc
;
630 int singlestep_enabled
;
631 /* Routine used to access memory */
633 uint32_t hflags
, saved_hflags
;
635 target_ulong btarget
;
639 BS_NONE
= 0, /* We go out of the TB without reaching a branch or an
640 * exception condition */
641 BS_STOP
= 1, /* We want to stop translation for any reason */
642 BS_BRANCH
= 2, /* We reached a branch condition */
643 BS_EXCP
= 3, /* We reached an exception condition */
/* ABI names of the 32 general purpose registers, for disassembly/logging. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI accumulators (one per DSP ASE accumulator). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO accumulators (one per DSP ASE accumulator). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the ACX extension accumulators. */
static const char * const regnames_ACX[] = {
    "ACX0", "ACX1", "ACX2", "ACX3",
};
/* Names of the 32 floating point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Log a per-instruction debug line (pc, raw opcode, message) when
   MIPS_DEBUG_DISAS is enabled; expects a DisasContext *ctx in scope. */
#define MIPS_DEBUG(fmt, ...)                                                  \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x " fmt "\n",                   \
                          ctx->pc, ctx->opcode , ## __VA_ARGS__);             \
        }                                                                     \
    } while (0)
/* Free-form disassembly logging, gated on MIPS_DEBUG_DISAS. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)
/* Report an invalid instruction: major opcode, function field, rt field. */
#define MIPS_INVAL(op)                                                        \
    MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26,            \
               ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F))
692 /* General purpose registers moves. */
693 static inline void gen_load_gpr (TCGv t
, int reg
)
696 tcg_gen_movi_tl(t
, 0);
698 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
701 static inline void gen_store_gpr (TCGv t
, int reg
)
704 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
707 /* Moves to/from ACX register. */
708 static inline void gen_load_ACX (TCGv t
, int reg
)
710 tcg_gen_mov_tl(t
, cpu_ACX
[reg
]);
713 static inline void gen_store_ACX (TCGv t
, int reg
)
715 tcg_gen_mov_tl(cpu_ACX
[reg
], t
);
718 /* Moves to/from shadow registers. */
719 static inline void gen_load_srsgpr (int from
, int to
)
721 TCGv t0
= tcg_temp_new();
724 tcg_gen_movi_tl(t0
, 0);
726 TCGv_i32 t2
= tcg_temp_new_i32();
727 TCGv_ptr addr
= tcg_temp_new_ptr();
729 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
730 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
731 tcg_gen_andi_i32(t2
, t2
, 0xf);
732 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
733 tcg_gen_ext_i32_ptr(addr
, t2
);
734 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
736 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
737 tcg_temp_free_ptr(addr
);
738 tcg_temp_free_i32(t2
);
740 gen_store_gpr(t0
, to
);
744 static inline void gen_store_srsgpr (int from
, int to
)
747 TCGv t0
= tcg_temp_new();
748 TCGv_i32 t2
= tcg_temp_new_i32();
749 TCGv_ptr addr
= tcg_temp_new_ptr();
751 gen_load_gpr(t0
, from
);
752 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
753 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
754 tcg_gen_andi_i32(t2
, t2
, 0xf);
755 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
756 tcg_gen_ext_i32_ptr(addr
, t2
);
757 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
759 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
760 tcg_temp_free_ptr(addr
);
761 tcg_temp_free_i32(t2
);
766 /* Floating point register moves. */
767 static void gen_load_fpr32(TCGv_i32 t
, int reg
)
769 tcg_gen_trunc_i64_i32(t
, fpu_f64
[reg
]);
772 static void gen_store_fpr32(TCGv_i32 t
, int reg
)
774 TCGv_i64 t64
= tcg_temp_new_i64();
775 tcg_gen_extu_i32_i64(t64
, t
);
776 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
777 tcg_temp_free_i64(t64
);
780 static void gen_load_fpr32h(TCGv_i32 t
, int reg
)
782 TCGv_i64 t64
= tcg_temp_new_i64();
783 tcg_gen_shri_i64(t64
, fpu_f64
[reg
], 32);
784 tcg_gen_trunc_i64_i32(t
, t64
);
785 tcg_temp_free_i64(t64
);
788 static void gen_store_fpr32h(TCGv_i32 t
, int reg
)
790 TCGv_i64 t64
= tcg_temp_new_i64();
791 tcg_gen_extu_i32_i64(t64
, t
);
792 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
793 tcg_temp_free_i64(t64
);
796 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
798 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
799 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
801 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
805 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
807 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
808 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
811 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
812 t0
= tcg_temp_new_i64();
813 tcg_gen_shri_i64(t0
, t
, 32);
814 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
815 tcg_temp_free_i64(t0
);
819 static inline int get_fp_bit (int cc
)
828 static inline void gen_save_pc(target_ulong pc
)
830 tcg_gen_movi_tl(cpu_PC
, pc
);
833 static inline void save_cpu_state (DisasContext
*ctx
, int do_save_pc
)
835 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
836 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
837 gen_save_pc(ctx
->pc
);
838 ctx
->saved_pc
= ctx
->pc
;
840 if (ctx
->hflags
!= ctx
->saved_hflags
) {
841 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
842 ctx
->saved_hflags
= ctx
->hflags
;
843 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
849 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
855 static inline void restore_cpu_state (CPUMIPSState
*env
, DisasContext
*ctx
)
857 ctx
->saved_hflags
= ctx
->hflags
;
858 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
864 ctx
->btarget
= env
->btarget
;
870 generate_exception_err (DisasContext
*ctx
, int excp
, int err
)
872 TCGv_i32 texcp
= tcg_const_i32(excp
);
873 TCGv_i32 terr
= tcg_const_i32(err
);
874 save_cpu_state(ctx
, 1);
875 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
876 tcg_temp_free_i32(terr
);
877 tcg_temp_free_i32(texcp
);
881 generate_exception (DisasContext
*ctx
, int excp
)
883 save_cpu_state(ctx
, 1);
884 gen_helper_0e0i(raise_exception
, excp
);
887 /* Addresses computation */
888 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
890 tcg_gen_add_tl(ret
, arg0
, arg1
);
892 #if defined(TARGET_MIPS64)
893 /* For compatibility with 32-bit code, data reference in user mode
894 with Status_UX = 0 should be casted to 32-bit and sign extended.
895 See the MIPS64 PRA manual, section 4.10. */
896 if (((ctx
->hflags
& MIPS_HFLAG_KSU
) == MIPS_HFLAG_UM
) &&
897 !(ctx
->hflags
& MIPS_HFLAG_UX
)) {
898 tcg_gen_ext32s_i64(ret
, ret
);
903 static inline void check_cp0_enabled(DisasContext
*ctx
)
905 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
906 generate_exception_err(ctx
, EXCP_CpU
, 0);
909 static inline void check_cp1_enabled(DisasContext
*ctx
)
911 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
912 generate_exception_err(ctx
, EXCP_CpU
, 1);
915 /* Verify that the processor is running with COP1X instructions enabled.
916 This is associated with the nabla symbol in the MIPS32 and MIPS64
919 static inline void check_cop1x(DisasContext
*ctx
)
921 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
922 generate_exception(ctx
, EXCP_RI
);
925 /* Verify that the processor is running with 64-bit floating-point
926 operations enabled. */
928 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
930 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
931 generate_exception(ctx
, EXCP_RI
);
935 * Verify if floating point register is valid; an operation is not defined
936 * if bit 0 of any register specification is set and the FR bit in the
937 * Status register equals zero, since the register numbers specify an
938 * even-odd pair of adjacent coprocessor general registers. When the FR bit
939 * in the Status register equals one, both even and odd register numbers
940 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
942 * Multiple 64 bit wide registers can be checked by calling
943 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
945 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
947 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
948 generate_exception(ctx
, EXCP_RI
);
951 /* This code generates a "reserved instruction" exception if the
952 CPU does not support the instruction set corresponding to flags. */
953 static inline void check_insn(CPUMIPSState
*env
, DisasContext
*ctx
, int flags
)
955 if (unlikely(!(env
->insn_flags
& flags
)))
956 generate_exception(ctx
, EXCP_RI
);
959 /* This code generates a "reserved instruction" exception if 64-bit
960 instructions are not enabled. */
961 static inline void check_mips_64(DisasContext
*ctx
)
963 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
964 generate_exception(ctx
, EXCP_RI
);
967 /* Define small wrappers for gen_load_fpr* so that we have a uniform
968 calling interface for 32 and 64-bit FPRs. No sense in changing
969 all callers for gen_load_fpr32 when we need the CTX parameter for
971 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(x, y)
972 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
973 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
974 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
975 int ft, int fs, int cc) \
977 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
978 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
981 check_cp1_64bitmode(ctx); \
987 check_cp1_registers(ctx, fs | ft); \
995 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
996 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
998 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
999 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1000 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1001 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1002 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1003 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1004 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1005 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1006 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1007 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1008 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1009 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1010 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1011 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1012 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1013 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1016 tcg_temp_free_i##bits (fp0); \
1017 tcg_temp_free_i##bits (fp1); \
1020 FOP_CONDS(, 0, d
, FMT_D
, 64)
1021 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1022 FOP_CONDS(, 0, s
, FMT_S
, 32)
1023 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1024 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1025 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1027 #undef gen_ldcmp_fpr32
1028 #undef gen_ldcmp_fpr64
1030 /* load/store instructions. */
1031 #define OP_LD(insn,fname) \
1032 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
1034 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
1041 #if defined(TARGET_MIPS64)
1047 #define OP_ST(insn,fname) \
1048 static inline void op_st_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx) \
1050 tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx); \
1055 #if defined(TARGET_MIPS64)
1060 #ifdef CONFIG_USER_ONLY
1061 #define OP_LD_ATOMIC(insn,fname) \
1062 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
1064 TCGv t0 = tcg_temp_new(); \
1065 tcg_gen_mov_tl(t0, arg1); \
1066 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
1067 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
1068 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
1069 tcg_temp_free(t0); \
1072 #define OP_LD_ATOMIC(insn,fname) \
1073 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
1075 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
1078 OP_LD_ATOMIC(ll
,ld32s
);
1079 #if defined(TARGET_MIPS64)
1080 OP_LD_ATOMIC(lld
,ld64
);
1084 #ifdef CONFIG_USER_ONLY
1085 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
1086 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
1088 TCGv t0 = tcg_temp_new(); \
1089 int l1 = gen_new_label(); \
1090 int l2 = gen_new_label(); \
1092 tcg_gen_andi_tl(t0, arg2, almask); \
1093 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
1094 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
1095 generate_exception(ctx, EXCP_AdES); \
1096 gen_set_label(l1); \
1097 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
1098 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
1099 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
1100 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
1101 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
1102 gen_helper_0e0i(raise_exception, EXCP_SC); \
1103 gen_set_label(l2); \
1104 tcg_gen_movi_tl(t0, 0); \
1105 gen_store_gpr(t0, rt); \
1106 tcg_temp_free(t0); \
1109 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
1110 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
1112 TCGv t0 = tcg_temp_new(); \
1113 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
1114 gen_store_gpr(t0, rt); \
1115 tcg_temp_free(t0); \
1118 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
1119 #if defined(TARGET_MIPS64)
1120 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
1124 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
1125 int base
, int16_t offset
)
1128 tcg_gen_movi_tl(addr
, offset
);
1129 } else if (offset
== 0) {
1130 gen_load_gpr(addr
, base
);
1132 tcg_gen_movi_tl(addr
, offset
);
1133 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
1137 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
1139 target_ulong pc
= ctx
->pc
;
1141 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
1142 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
1147 pc
&= ~(target_ulong
)3;
1152 static void gen_ld (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1153 int rt
, int base
, int16_t offset
)
1155 const char *opn
= "ld";
1158 if (rt
== 0 && env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
1159 /* Loongson CPU uses a load to zero register for prefetch.
1160 We emulate it as a NOP. On other CPU we must perform the
1161 actual memory access. */
1166 t0
= tcg_temp_new();
1167 t1
= tcg_temp_new();
1168 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1171 #if defined(TARGET_MIPS64)
1173 save_cpu_state(ctx
, 0);
1174 op_ld_lwu(t0
, t0
, ctx
);
1175 gen_store_gpr(t0
, rt
);
1179 save_cpu_state(ctx
, 0);
1180 op_ld_ld(t0
, t0
, ctx
);
1181 gen_store_gpr(t0
, rt
);
1185 save_cpu_state(ctx
, 1);
1186 op_ld_lld(t0
, t0
, ctx
);
1187 gen_store_gpr(t0
, rt
);
1191 save_cpu_state(ctx
, 1);
1192 gen_load_gpr(t1
, rt
);
1193 gen_helper_1e2i(ldl
, t1
, t1
, t0
, ctx
->mem_idx
);
1194 gen_store_gpr(t1
, rt
);
1198 save_cpu_state(ctx
, 1);
1199 gen_load_gpr(t1
, rt
);
1200 gen_helper_1e2i(ldr
, t1
, t1
, t0
, ctx
->mem_idx
);
1201 gen_store_gpr(t1
, rt
);
1205 save_cpu_state(ctx
, 0);
1206 tcg_gen_movi_tl(t1
, pc_relative_pc(ctx
));
1207 gen_op_addr_add(ctx
, t0
, t0
, t1
);
1208 op_ld_ld(t0
, t0
, ctx
);
1209 gen_store_gpr(t0
, rt
);
1214 save_cpu_state(ctx
, 0);
1215 tcg_gen_movi_tl(t1
, pc_relative_pc(ctx
));
1216 gen_op_addr_add(ctx
, t0
, t0
, t1
);
1217 op_ld_lw(t0
, t0
, ctx
);
1218 gen_store_gpr(t0
, rt
);
1222 save_cpu_state(ctx
, 0);
1223 op_ld_lw(t0
, t0
, ctx
);
1224 gen_store_gpr(t0
, rt
);
1228 save_cpu_state(ctx
, 0);
1229 op_ld_lh(t0
, t0
, ctx
);
1230 gen_store_gpr(t0
, rt
);
1234 save_cpu_state(ctx
, 0);
1235 op_ld_lhu(t0
, t0
, ctx
);
1236 gen_store_gpr(t0
, rt
);
1240 save_cpu_state(ctx
, 0);
1241 op_ld_lb(t0
, t0
, ctx
);
1242 gen_store_gpr(t0
, rt
);
1246 save_cpu_state(ctx
, 0);
1247 op_ld_lbu(t0
, t0
, ctx
);
1248 gen_store_gpr(t0
, rt
);
1252 save_cpu_state(ctx
, 1);
1253 gen_load_gpr(t1
, rt
);
1254 gen_helper_1e2i(lwl
, t1
, t1
, t0
, ctx
->mem_idx
);
1255 gen_store_gpr(t1
, rt
);
1259 save_cpu_state(ctx
, 1);
1260 gen_load_gpr(t1
, rt
);
1261 gen_helper_1e2i(lwr
, t1
, t1
, t0
, ctx
->mem_idx
);
1262 gen_store_gpr(t1
, rt
);
1266 save_cpu_state(ctx
, 1);
1267 op_ld_ll(t0
, t0
, ctx
);
1268 gen_store_gpr(t0
, rt
);
1272 (void)opn
; /* avoid a compiler warning */
1273 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1279 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
1280 int base
, int16_t offset
)
1282 const char *opn
= "st";
1283 TCGv t0
= tcg_temp_new();
1284 TCGv t1
= tcg_temp_new();
1286 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1287 gen_load_gpr(t1
, rt
);
1289 #if defined(TARGET_MIPS64)
1291 save_cpu_state(ctx
, 0);
1292 op_st_sd(t1
, t0
, ctx
);
1296 save_cpu_state(ctx
, 1);
1297 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
1301 save_cpu_state(ctx
, 1);
1302 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
1307 save_cpu_state(ctx
, 0);
1308 op_st_sw(t1
, t0
, ctx
);
1312 save_cpu_state(ctx
, 0);
1313 op_st_sh(t1
, t0
, ctx
);
1317 save_cpu_state(ctx
, 0);
1318 op_st_sb(t1
, t0
, ctx
);
1322 save_cpu_state(ctx
, 1);
1323 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
1327 save_cpu_state(ctx
, 1);
1328 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
1332 (void)opn
; /* avoid a compiler warning */
1333 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1339 /* Store conditional */
1340 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
1341 int base
, int16_t offset
)
1343 const char *opn
= "st_cond";
1346 t0
= tcg_temp_local_new();
1348 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1349 /* Don't do NOP if destination is zero: we must perform the actual
1352 t1
= tcg_temp_local_new();
1353 gen_load_gpr(t1
, rt
);
1355 #if defined(TARGET_MIPS64)
1357 save_cpu_state(ctx
, 1);
1358 op_st_scd(t1
, t0
, rt
, ctx
);
1363 save_cpu_state(ctx
, 1);
1364 op_st_sc(t1
, t0
, rt
, ctx
);
1368 (void)opn
; /* avoid a compiler warning */
1369 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1374 /* Load and store */
1375 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
1376 int base
, int16_t offset
)
1378 const char *opn
= "flt_ldst";
1379 TCGv t0
= tcg_temp_new();
1381 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1382 /* Don't do NOP if destination is zero: we must perform the actual
1387 TCGv_i32 fp0
= tcg_temp_new_i32();
1389 tcg_gen_qemu_ld32s(t0
, t0
, ctx
->mem_idx
);
1390 tcg_gen_trunc_tl_i32(fp0
, t0
);
1391 gen_store_fpr32(fp0
, ft
);
1392 tcg_temp_free_i32(fp0
);
1398 TCGv_i32 fp0
= tcg_temp_new_i32();
1399 TCGv t1
= tcg_temp_new();
1401 gen_load_fpr32(fp0
, ft
);
1402 tcg_gen_extu_i32_tl(t1
, fp0
);
1403 tcg_gen_qemu_st32(t1
, t0
, ctx
->mem_idx
);
1405 tcg_temp_free_i32(fp0
);
1411 TCGv_i64 fp0
= tcg_temp_new_i64();
1413 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
1414 gen_store_fpr64(ctx
, fp0
, ft
);
1415 tcg_temp_free_i64(fp0
);
1421 TCGv_i64 fp0
= tcg_temp_new_i64();
1423 gen_load_fpr64(ctx
, fp0
, ft
);
1424 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
1425 tcg_temp_free_i64(fp0
);
1431 generate_exception(ctx
, EXCP_RI
);
1434 (void)opn
; /* avoid a compiler warning */
1435 MIPS_DEBUG("%s %s, %d(%s)", opn
, fregnames
[ft
], offset
, regnames
[base
]);
1440 static void gen_cop1_ldst(CPUMIPSState
*env
, DisasContext
*ctx
,
1441 uint32_t op
, int rt
, int rs
, int16_t imm
)
1443 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
1444 check_cp1_enabled(ctx
);
1445 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
1447 generate_exception_err(ctx
, EXCP_CpU
, 1);
1451 /* Arithmetic with immediate operand */
1452 static void gen_arith_imm (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1453 int rt
, int rs
, int16_t imm
)
1455 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
1456 const char *opn
= "imm arith";
1458 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
1459 /* If no destination, treat it as a NOP.
1460 For addi, we must generate the overflow exception when needed. */
1467 TCGv t0
= tcg_temp_local_new();
1468 TCGv t1
= tcg_temp_new();
1469 TCGv t2
= tcg_temp_new();
1470 int l1
= gen_new_label();
1472 gen_load_gpr(t1
, rs
);
1473 tcg_gen_addi_tl(t0
, t1
, uimm
);
1474 tcg_gen_ext32s_tl(t0
, t0
);
1476 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
1477 tcg_gen_xori_tl(t2
, t0
, uimm
);
1478 tcg_gen_and_tl(t1
, t1
, t2
);
1480 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1482 /* operands of same sign, result different sign */
1483 generate_exception(ctx
, EXCP_OVERFLOW
);
1485 tcg_gen_ext32s_tl(t0
, t0
);
1486 gen_store_gpr(t0
, rt
);
1493 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1494 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
1496 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1500 #if defined(TARGET_MIPS64)
1503 TCGv t0
= tcg_temp_local_new();
1504 TCGv t1
= tcg_temp_new();
1505 TCGv t2
= tcg_temp_new();
1506 int l1
= gen_new_label();
1508 gen_load_gpr(t1
, rs
);
1509 tcg_gen_addi_tl(t0
, t1
, uimm
);
1511 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
1512 tcg_gen_xori_tl(t2
, t0
, uimm
);
1513 tcg_gen_and_tl(t1
, t1
, t2
);
1515 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1517 /* operands of same sign, result different sign */
1518 generate_exception(ctx
, EXCP_OVERFLOW
);
1520 gen_store_gpr(t0
, rt
);
1527 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1529 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1535 (void)opn
; /* avoid a compiler warning */
1536 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1539 /* Logic with immediate operand */
1540 static void gen_logic_imm(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1541 int rt
, int rs
, int16_t imm
)
1544 const char *opn
= "imm logic";
1547 /* If no destination, treat it as a NOP. */
1551 uimm
= (uint16_t)imm
;
1554 if (likely(rs
!= 0))
1555 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1557 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
1562 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1564 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1568 if (likely(rs
!= 0))
1569 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1571 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1575 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
1579 (void)opn
; /* avoid a compiler warning */
1580 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1583 /* Set on less than with immediate operand */
1584 static void gen_slt_imm(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1585 int rt
, int rs
, int16_t imm
)
1587 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
1588 const char *opn
= "imm arith";
1592 /* If no destination, treat it as a NOP. */
1596 t0
= tcg_temp_new();
1597 gen_load_gpr(t0
, rs
);
1600 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
1604 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
1608 (void)opn
; /* avoid a compiler warning */
1609 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1613 /* Shifts with immediate operand */
1614 static void gen_shift_imm(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1615 int rt
, int rs
, int16_t imm
)
1617 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
1618 const char *opn
= "imm shift";
1622 /* If no destination, treat it as a NOP. */
1627 t0
= tcg_temp_new();
1628 gen_load_gpr(t0
, rs
);
1631 tcg_gen_shli_tl(t0
, t0
, uimm
);
1632 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1636 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
1641 tcg_gen_ext32u_tl(t0
, t0
);
1642 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
1644 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1650 TCGv_i32 t1
= tcg_temp_new_i32();
1652 tcg_gen_trunc_tl_i32(t1
, t0
);
1653 tcg_gen_rotri_i32(t1
, t1
, uimm
);
1654 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
1655 tcg_temp_free_i32(t1
);
1657 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1661 #if defined(TARGET_MIPS64)
1663 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
1667 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
1671 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
1676 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
1678 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
1683 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1687 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1691 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1695 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1700 (void)opn
; /* avoid a compiler warning */
1701 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1706 static void gen_arith (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1707 int rd
, int rs
, int rt
)
1709 const char *opn
= "arith";
1711 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
1712 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
1713 /* If no destination, treat it as a NOP.
1714 For add & sub, we must generate the overflow exception when needed. */
1722 TCGv t0
= tcg_temp_local_new();
1723 TCGv t1
= tcg_temp_new();
1724 TCGv t2
= tcg_temp_new();
1725 int l1
= gen_new_label();
1727 gen_load_gpr(t1
, rs
);
1728 gen_load_gpr(t2
, rt
);
1729 tcg_gen_add_tl(t0
, t1
, t2
);
1730 tcg_gen_ext32s_tl(t0
, t0
);
1731 tcg_gen_xor_tl(t1
, t1
, t2
);
1732 tcg_gen_xor_tl(t2
, t0
, t2
);
1733 tcg_gen_andc_tl(t1
, t2
, t1
);
1735 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1737 /* operands of same sign, result different sign */
1738 generate_exception(ctx
, EXCP_OVERFLOW
);
1740 gen_store_gpr(t0
, rd
);
1746 if (rs
!= 0 && rt
!= 0) {
1747 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1748 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1749 } else if (rs
== 0 && rt
!= 0) {
1750 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1751 } else if (rs
!= 0 && rt
== 0) {
1752 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1754 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1760 TCGv t0
= tcg_temp_local_new();
1761 TCGv t1
= tcg_temp_new();
1762 TCGv t2
= tcg_temp_new();
1763 int l1
= gen_new_label();
1765 gen_load_gpr(t1
, rs
);
1766 gen_load_gpr(t2
, rt
);
1767 tcg_gen_sub_tl(t0
, t1
, t2
);
1768 tcg_gen_ext32s_tl(t0
, t0
);
1769 tcg_gen_xor_tl(t2
, t1
, t2
);
1770 tcg_gen_xor_tl(t1
, t0
, t1
);
1771 tcg_gen_and_tl(t1
, t1
, t2
);
1773 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1775 /* operands of different sign, first operand and result different sign */
1776 generate_exception(ctx
, EXCP_OVERFLOW
);
1778 gen_store_gpr(t0
, rd
);
1784 if (rs
!= 0 && rt
!= 0) {
1785 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1786 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1787 } else if (rs
== 0 && rt
!= 0) {
1788 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1789 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1790 } else if (rs
!= 0 && rt
== 0) {
1791 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1793 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1797 #if defined(TARGET_MIPS64)
1800 TCGv t0
= tcg_temp_local_new();
1801 TCGv t1
= tcg_temp_new();
1802 TCGv t2
= tcg_temp_new();
1803 int l1
= gen_new_label();
1805 gen_load_gpr(t1
, rs
);
1806 gen_load_gpr(t2
, rt
);
1807 tcg_gen_add_tl(t0
, t1
, t2
);
1808 tcg_gen_xor_tl(t1
, t1
, t2
);
1809 tcg_gen_xor_tl(t2
, t0
, t2
);
1810 tcg_gen_andc_tl(t1
, t2
, t1
);
1812 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1814 /* operands of same sign, result different sign */
1815 generate_exception(ctx
, EXCP_OVERFLOW
);
1817 gen_store_gpr(t0
, rd
);
1823 if (rs
!= 0 && rt
!= 0) {
1824 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1825 } else if (rs
== 0 && rt
!= 0) {
1826 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1827 } else if (rs
!= 0 && rt
== 0) {
1828 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1830 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1836 TCGv t0
= tcg_temp_local_new();
1837 TCGv t1
= tcg_temp_new();
1838 TCGv t2
= tcg_temp_new();
1839 int l1
= gen_new_label();
1841 gen_load_gpr(t1
, rs
);
1842 gen_load_gpr(t2
, rt
);
1843 tcg_gen_sub_tl(t0
, t1
, t2
);
1844 tcg_gen_xor_tl(t2
, t1
, t2
);
1845 tcg_gen_xor_tl(t1
, t0
, t1
);
1846 tcg_gen_and_tl(t1
, t1
, t2
);
1848 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1850 /* operands of different sign, first operand and result different sign */
1851 generate_exception(ctx
, EXCP_OVERFLOW
);
1853 gen_store_gpr(t0
, rd
);
1859 if (rs
!= 0 && rt
!= 0) {
1860 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1861 } else if (rs
== 0 && rt
!= 0) {
1862 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1863 } else if (rs
!= 0 && rt
== 0) {
1864 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1866 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1872 if (likely(rs
!= 0 && rt
!= 0)) {
1873 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1874 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1876 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1881 (void)opn
; /* avoid a compiler warning */
1882 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1885 /* Conditional move */
1886 static void gen_cond_move(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1887 int rd
, int rs
, int rt
)
1889 const char *opn
= "cond move";
1893 /* If no destination, treat it as a NOP.
1894 For add & sub, we must generate the overflow exception when needed. */
1899 l1
= gen_new_label();
1902 if (likely(rt
!= 0))
1903 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[rt
], 0, l1
);
1909 if (likely(rt
!= 0))
1910 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[rt
], 0, l1
);
1915 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1917 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1920 (void)opn
; /* avoid a compiler warning */
1921 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1925 static void gen_logic(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1926 int rd
, int rs
, int rt
)
1928 const char *opn
= "logic";
1931 /* If no destination, treat it as a NOP. */
1938 if (likely(rs
!= 0 && rt
!= 0)) {
1939 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1941 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1946 if (rs
!= 0 && rt
!= 0) {
1947 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1948 } else if (rs
== 0 && rt
!= 0) {
1949 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1950 } else if (rs
!= 0 && rt
== 0) {
1951 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1953 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
1958 if (likely(rs
!= 0 && rt
!= 0)) {
1959 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1960 } else if (rs
== 0 && rt
!= 0) {
1961 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1962 } else if (rs
!= 0 && rt
== 0) {
1963 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1965 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1970 if (likely(rs
!= 0 && rt
!= 0)) {
1971 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1972 } else if (rs
== 0 && rt
!= 0) {
1973 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1974 } else if (rs
!= 0 && rt
== 0) {
1975 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1977 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1982 (void)opn
; /* avoid a compiler warning */
1983 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1986 /* Set on lower than */
1987 static void gen_slt(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1988 int rd
, int rs
, int rt
)
1990 const char *opn
= "slt";
1994 /* If no destination, treat it as a NOP. */
1999 t0
= tcg_temp_new();
2000 t1
= tcg_temp_new();
2001 gen_load_gpr(t0
, rs
);
2002 gen_load_gpr(t1
, rt
);
2005 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2009 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2013 (void)opn
; /* avoid a compiler warning */
2014 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2020 static void gen_shift (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
2021 int rd
, int rs
, int rt
)
2023 const char *opn
= "shifts";
2027 /* If no destination, treat it as a NOP.
2028 For add & sub, we must generate the overflow exception when needed. */
2033 t0
= tcg_temp_new();
2034 t1
= tcg_temp_new();
2035 gen_load_gpr(t0
, rs
);
2036 gen_load_gpr(t1
, rt
);
2039 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2040 tcg_gen_shl_tl(t0
, t1
, t0
);
2041 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2045 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2046 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2050 tcg_gen_ext32u_tl(t1
, t1
);
2051 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2052 tcg_gen_shr_tl(t0
, t1
, t0
);
2053 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2058 TCGv_i32 t2
= tcg_temp_new_i32();
2059 TCGv_i32 t3
= tcg_temp_new_i32();
2061 tcg_gen_trunc_tl_i32(t2
, t0
);
2062 tcg_gen_trunc_tl_i32(t3
, t1
);
2063 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2064 tcg_gen_rotr_i32(t2
, t3
, t2
);
2065 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2066 tcg_temp_free_i32(t2
);
2067 tcg_temp_free_i32(t3
);
2071 #if defined(TARGET_MIPS64)
2073 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2074 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2078 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2079 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2083 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2084 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2088 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2089 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
2094 (void)opn
; /* avoid a compiler warning */
2095 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2100 /* Arithmetic on HI/LO registers */
2101 static void gen_HILO (DisasContext
*ctx
, uint32_t opc
, int reg
)
2103 const char *opn
= "hilo";
2105 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
2112 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[0]);
2116 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[0]);
2121 tcg_gen_mov_tl(cpu_HI
[0], cpu_gpr
[reg
]);
2123 tcg_gen_movi_tl(cpu_HI
[0], 0);
2128 tcg_gen_mov_tl(cpu_LO
[0], cpu_gpr
[reg
]);
2130 tcg_gen_movi_tl(cpu_LO
[0], 0);
2134 (void)opn
; /* avoid a compiler warning */
2135 MIPS_DEBUG("%s %s", opn
, regnames
[reg
]);
2138 static void gen_muldiv (DisasContext
*ctx
, uint32_t opc
,
2141 const char *opn
= "mul/div";
2147 #if defined(TARGET_MIPS64)
2151 t0
= tcg_temp_local_new();
2152 t1
= tcg_temp_local_new();
2155 t0
= tcg_temp_new();
2156 t1
= tcg_temp_new();
2160 gen_load_gpr(t0
, rs
);
2161 gen_load_gpr(t1
, rt
);
2165 int l1
= gen_new_label();
2166 int l2
= gen_new_label();
2168 tcg_gen_ext32s_tl(t0
, t0
);
2169 tcg_gen_ext32s_tl(t1
, t1
);
2170 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2171 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2172 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2174 tcg_gen_mov_tl(cpu_LO
[0], t0
);
2175 tcg_gen_movi_tl(cpu_HI
[0], 0);
2178 tcg_gen_div_tl(cpu_LO
[0], t0
, t1
);
2179 tcg_gen_rem_tl(cpu_HI
[0], t0
, t1
);
2180 tcg_gen_ext32s_tl(cpu_LO
[0], cpu_LO
[0]);
2181 tcg_gen_ext32s_tl(cpu_HI
[0], cpu_HI
[0]);
2188 int l1
= gen_new_label();
2190 tcg_gen_ext32u_tl(t0
, t0
);
2191 tcg_gen_ext32u_tl(t1
, t1
);
2192 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2193 tcg_gen_divu_tl(cpu_LO
[0], t0
, t1
);
2194 tcg_gen_remu_tl(cpu_HI
[0], t0
, t1
);
2195 tcg_gen_ext32s_tl(cpu_LO
[0], cpu_LO
[0]);
2196 tcg_gen_ext32s_tl(cpu_HI
[0], cpu_HI
[0]);
2203 TCGv_i64 t2
= tcg_temp_new_i64();
2204 TCGv_i64 t3
= tcg_temp_new_i64();
2206 tcg_gen_ext_tl_i64(t2
, t0
);
2207 tcg_gen_ext_tl_i64(t3
, t1
);
2208 tcg_gen_mul_i64(t2
, t2
, t3
);
2209 tcg_temp_free_i64(t3
);
2210 tcg_gen_trunc_i64_tl(t0
, t2
);
2211 tcg_gen_shri_i64(t2
, t2
, 32);
2212 tcg_gen_trunc_i64_tl(t1
, t2
);
2213 tcg_temp_free_i64(t2
);
2214 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2215 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2221 TCGv_i64 t2
= tcg_temp_new_i64();
2222 TCGv_i64 t3
= tcg_temp_new_i64();
2224 tcg_gen_ext32u_tl(t0
, t0
);
2225 tcg_gen_ext32u_tl(t1
, t1
);
2226 tcg_gen_extu_tl_i64(t2
, t0
);
2227 tcg_gen_extu_tl_i64(t3
, t1
);
2228 tcg_gen_mul_i64(t2
, t2
, t3
);
2229 tcg_temp_free_i64(t3
);
2230 tcg_gen_trunc_i64_tl(t0
, t2
);
2231 tcg_gen_shri_i64(t2
, t2
, 32);
2232 tcg_gen_trunc_i64_tl(t1
, t2
);
2233 tcg_temp_free_i64(t2
);
2234 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2235 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2239 #if defined(TARGET_MIPS64)
2242 int l1
= gen_new_label();
2243 int l2
= gen_new_label();
2245 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2246 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2247 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2248 tcg_gen_mov_tl(cpu_LO
[0], t0
);
2249 tcg_gen_movi_tl(cpu_HI
[0], 0);
2252 tcg_gen_div_i64(cpu_LO
[0], t0
, t1
);
2253 tcg_gen_rem_i64(cpu_HI
[0], t0
, t1
);
2260 int l1
= gen_new_label();
2262 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2263 tcg_gen_divu_i64(cpu_LO
[0], t0
, t1
);
2264 tcg_gen_remu_i64(cpu_HI
[0], t0
, t1
);
2270 gen_helper_dmult(cpu_env
, t0
, t1
);
2274 gen_helper_dmultu(cpu_env
, t0
, t1
);
2280 TCGv_i64 t2
= tcg_temp_new_i64();
2281 TCGv_i64 t3
= tcg_temp_new_i64();
2283 tcg_gen_ext_tl_i64(t2
, t0
);
2284 tcg_gen_ext_tl_i64(t3
, t1
);
2285 tcg_gen_mul_i64(t2
, t2
, t3
);
2286 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2287 tcg_gen_add_i64(t2
, t2
, t3
);
2288 tcg_temp_free_i64(t3
);
2289 tcg_gen_trunc_i64_tl(t0
, t2
);
2290 tcg_gen_shri_i64(t2
, t2
, 32);
2291 tcg_gen_trunc_i64_tl(t1
, t2
);
2292 tcg_temp_free_i64(t2
);
2293 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2294 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2300 TCGv_i64 t2
= tcg_temp_new_i64();
2301 TCGv_i64 t3
= tcg_temp_new_i64();
2303 tcg_gen_ext32u_tl(t0
, t0
);
2304 tcg_gen_ext32u_tl(t1
, t1
);
2305 tcg_gen_extu_tl_i64(t2
, t0
);
2306 tcg_gen_extu_tl_i64(t3
, t1
);
2307 tcg_gen_mul_i64(t2
, t2
, t3
);
2308 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2309 tcg_gen_add_i64(t2
, t2
, t3
);
2310 tcg_temp_free_i64(t3
);
2311 tcg_gen_trunc_i64_tl(t0
, t2
);
2312 tcg_gen_shri_i64(t2
, t2
, 32);
2313 tcg_gen_trunc_i64_tl(t1
, t2
);
2314 tcg_temp_free_i64(t2
);
2315 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2316 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2322 TCGv_i64 t2
= tcg_temp_new_i64();
2323 TCGv_i64 t3
= tcg_temp_new_i64();
2325 tcg_gen_ext_tl_i64(t2
, t0
);
2326 tcg_gen_ext_tl_i64(t3
, t1
);
2327 tcg_gen_mul_i64(t2
, t2
, t3
);
2328 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2329 tcg_gen_sub_i64(t2
, t3
, t2
);
2330 tcg_temp_free_i64(t3
);
2331 tcg_gen_trunc_i64_tl(t0
, t2
);
2332 tcg_gen_shri_i64(t2
, t2
, 32);
2333 tcg_gen_trunc_i64_tl(t1
, t2
);
2334 tcg_temp_free_i64(t2
);
2335 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2336 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2342 TCGv_i64 t2
= tcg_temp_new_i64();
2343 TCGv_i64 t3
= tcg_temp_new_i64();
2345 tcg_gen_ext32u_tl(t0
, t0
);
2346 tcg_gen_ext32u_tl(t1
, t1
);
2347 tcg_gen_extu_tl_i64(t2
, t0
);
2348 tcg_gen_extu_tl_i64(t3
, t1
);
2349 tcg_gen_mul_i64(t2
, t2
, t3
);
2350 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2351 tcg_gen_sub_i64(t2
, t3
, t2
);
2352 tcg_temp_free_i64(t3
);
2353 tcg_gen_trunc_i64_tl(t0
, t2
);
2354 tcg_gen_shri_i64(t2
, t2
, 32);
2355 tcg_gen_trunc_i64_tl(t1
, t2
);
2356 tcg_temp_free_i64(t2
);
2357 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2358 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2364 generate_exception(ctx
, EXCP_RI
);
2367 (void)opn
; /* avoid a compiler warning */
2368 MIPS_DEBUG("%s %s %s", opn
, regnames
[rs
], regnames
[rt
]);
2374 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
2375 int rd
, int rs
, int rt
)
2377 const char *opn
= "mul vr54xx";
2378 TCGv t0
= tcg_temp_new();
2379 TCGv t1
= tcg_temp_new();
2381 gen_load_gpr(t0
, rs
);
2382 gen_load_gpr(t1
, rt
);
2385 case OPC_VR54XX_MULS
:
2386 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
2389 case OPC_VR54XX_MULSU
:
2390 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
2393 case OPC_VR54XX_MACC
:
2394 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
2397 case OPC_VR54XX_MACCU
:
2398 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
2401 case OPC_VR54XX_MSAC
:
2402 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
2405 case OPC_VR54XX_MSACU
:
2406 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
2409 case OPC_VR54XX_MULHI
:
2410 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
2413 case OPC_VR54XX_MULHIU
:
2414 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
2417 case OPC_VR54XX_MULSHI
:
2418 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
2421 case OPC_VR54XX_MULSHIU
:
2422 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
2425 case OPC_VR54XX_MACCHI
:
2426 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
2429 case OPC_VR54XX_MACCHIU
:
2430 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
2433 case OPC_VR54XX_MSACHI
:
2434 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
2437 case OPC_VR54XX_MSACHIU
:
2438 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
2442 MIPS_INVAL("mul vr54xx");
2443 generate_exception(ctx
, EXCP_RI
);
2446 gen_store_gpr(t0
, rd
);
2447 (void)opn
; /* avoid a compiler warning */
2448 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2455 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
2458 const char *opn
= "CLx";
2466 t0
= tcg_temp_new();
2467 gen_load_gpr(t0
, rs
);
2470 gen_helper_clo(cpu_gpr
[rd
], t0
);
2474 gen_helper_clz(cpu_gpr
[rd
], t0
);
2477 #if defined(TARGET_MIPS64)
2479 gen_helper_dclo(cpu_gpr
[rd
], t0
);
2483 gen_helper_dclz(cpu_gpr
[rd
], t0
);
2488 (void)opn
; /* avoid a compiler warning */
2489 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
2493 /* Godson integer instructions */
2494 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
2495 int rd
, int rs
, int rt
)
2497 const char *opn
= "loongson";
2509 case OPC_MULTU_G_2E
:
2510 case OPC_MULTU_G_2F
:
2511 #if defined(TARGET_MIPS64)
2512 case OPC_DMULT_G_2E
:
2513 case OPC_DMULT_G_2F
:
2514 case OPC_DMULTU_G_2E
:
2515 case OPC_DMULTU_G_2F
:
2517 t0
= tcg_temp_new();
2518 t1
= tcg_temp_new();
2521 t0
= tcg_temp_local_new();
2522 t1
= tcg_temp_local_new();
2526 gen_load_gpr(t0
, rs
);
2527 gen_load_gpr(t1
, rt
);
2532 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2533 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2536 case OPC_MULTU_G_2E
:
2537 case OPC_MULTU_G_2F
:
2538 tcg_gen_ext32u_tl(t0
, t0
);
2539 tcg_gen_ext32u_tl(t1
, t1
);
2540 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2541 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2547 int l1
= gen_new_label();
2548 int l2
= gen_new_label();
2549 int l3
= gen_new_label();
2550 tcg_gen_ext32s_tl(t0
, t0
);
2551 tcg_gen_ext32s_tl(t1
, t1
);
2552 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2553 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2556 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2557 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2558 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
2561 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
2562 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2570 int l1
= gen_new_label();
2571 int l2
= gen_new_label();
2572 tcg_gen_ext32u_tl(t0
, t0
);
2573 tcg_gen_ext32u_tl(t1
, t1
);
2574 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2575 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2578 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
2579 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2587 int l1
= gen_new_label();
2588 int l2
= gen_new_label();
2589 int l3
= gen_new_label();
2590 tcg_gen_ext32u_tl(t0
, t0
);
2591 tcg_gen_ext32u_tl(t1
, t1
);
2592 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2593 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2594 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2596 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2599 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
2600 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2608 int l1
= gen_new_label();
2609 int l2
= gen_new_label();
2610 tcg_gen_ext32u_tl(t0
, t0
);
2611 tcg_gen_ext32u_tl(t1
, t1
);
2612 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2613 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2616 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
2617 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2622 #if defined(TARGET_MIPS64)
2623 case OPC_DMULT_G_2E
:
2624 case OPC_DMULT_G_2F
:
2625 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2628 case OPC_DMULTU_G_2E
:
2629 case OPC_DMULTU_G_2F
:
2630 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2636 int l1
= gen_new_label();
2637 int l2
= gen_new_label();
2638 int l3
= gen_new_label();
2639 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2640 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2643 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2644 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2645 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
2648 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
2653 case OPC_DDIVU_G_2E
:
2654 case OPC_DDIVU_G_2F
:
2656 int l1
= gen_new_label();
2657 int l2
= gen_new_label();
2658 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2659 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2662 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
2670 int l1
= gen_new_label();
2671 int l2
= gen_new_label();
2672 int l3
= gen_new_label();
2673 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2674 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2675 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2677 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2680 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
2685 case OPC_DMODU_G_2E
:
2686 case OPC_DMODU_G_2F
:
2688 int l1
= gen_new_label();
2689 int l2
= gen_new_label();
2690 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2691 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2694 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
2702 (void)opn
; /* avoid a compiler warning */
2703 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
2708 /* Loongson multimedia instructions */
2709 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
2711 const char *opn
= "loongson_cp2";
2712 uint32_t opc
, shift_max
;
2715 opc
= MASK_LMI(ctx
->opcode
);
2721 t0
= tcg_temp_local_new_i64();
2722 t1
= tcg_temp_local_new_i64();
2725 t0
= tcg_temp_new_i64();
2726 t1
= tcg_temp_new_i64();
2730 gen_load_fpr64(ctx
, t0
, rs
);
2731 gen_load_fpr64(ctx
, t1
, rt
);
2733 #define LMI_HELPER(UP, LO) \
2734 case OPC_##UP: gen_helper_##LO(t0, t0, t1); opn = #LO; break
2735 #define LMI_HELPER_1(UP, LO) \
2736 case OPC_##UP: gen_helper_##LO(t0, t0); opn = #LO; break
2737 #define LMI_DIRECT(UP, LO, OP) \
2738 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); opn = #LO; break
2741 LMI_HELPER(PADDSH
, paddsh
);
2742 LMI_HELPER(PADDUSH
, paddush
);
2743 LMI_HELPER(PADDH
, paddh
);
2744 LMI_HELPER(PADDW
, paddw
);
2745 LMI_HELPER(PADDSB
, paddsb
);
2746 LMI_HELPER(PADDUSB
, paddusb
);
2747 LMI_HELPER(PADDB
, paddb
);
2749 LMI_HELPER(PSUBSH
, psubsh
);
2750 LMI_HELPER(PSUBUSH
, psubush
);
2751 LMI_HELPER(PSUBH
, psubh
);
2752 LMI_HELPER(PSUBW
, psubw
);
2753 LMI_HELPER(PSUBSB
, psubsb
);
2754 LMI_HELPER(PSUBUSB
, psubusb
);
2755 LMI_HELPER(PSUBB
, psubb
);
2757 LMI_HELPER(PSHUFH
, pshufh
);
2758 LMI_HELPER(PACKSSWH
, packsswh
);
2759 LMI_HELPER(PACKSSHB
, packsshb
);
2760 LMI_HELPER(PACKUSHB
, packushb
);
2762 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
2763 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
2764 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
2765 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
2766 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
2767 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
2769 LMI_HELPER(PAVGH
, pavgh
);
2770 LMI_HELPER(PAVGB
, pavgb
);
2771 LMI_HELPER(PMAXSH
, pmaxsh
);
2772 LMI_HELPER(PMINSH
, pminsh
);
2773 LMI_HELPER(PMAXUB
, pmaxub
);
2774 LMI_HELPER(PMINUB
, pminub
);
2776 LMI_HELPER(PCMPEQW
, pcmpeqw
);
2777 LMI_HELPER(PCMPGTW
, pcmpgtw
);
2778 LMI_HELPER(PCMPEQH
, pcmpeqh
);
2779 LMI_HELPER(PCMPGTH
, pcmpgth
);
2780 LMI_HELPER(PCMPEQB
, pcmpeqb
);
2781 LMI_HELPER(PCMPGTB
, pcmpgtb
);
2783 LMI_HELPER(PSLLW
, psllw
);
2784 LMI_HELPER(PSLLH
, psllh
);
2785 LMI_HELPER(PSRLW
, psrlw
);
2786 LMI_HELPER(PSRLH
, psrlh
);
2787 LMI_HELPER(PSRAW
, psraw
);
2788 LMI_HELPER(PSRAH
, psrah
);
2790 LMI_HELPER(PMULLH
, pmullh
);
2791 LMI_HELPER(PMULHH
, pmulhh
);
2792 LMI_HELPER(PMULHUH
, pmulhuh
);
2793 LMI_HELPER(PMADDHW
, pmaddhw
);
2795 LMI_HELPER(PASUBUB
, pasubub
);
2796 LMI_HELPER_1(BIADD
, biadd
);
2797 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
2799 LMI_DIRECT(PADDD
, paddd
, add
);
2800 LMI_DIRECT(PSUBD
, psubd
, sub
);
2801 LMI_DIRECT(XOR_CP2
, xor, xor);
2802 LMI_DIRECT(NOR_CP2
, nor
, nor
);
2803 LMI_DIRECT(AND_CP2
, and, and);
2804 LMI_DIRECT(PANDN
, pandn
, andc
);
2805 LMI_DIRECT(OR
, or, or);
2808 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
2812 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
2816 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
2820 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
2825 tcg_gen_andi_i64(t1
, t1
, 3);
2826 tcg_gen_shli_i64(t1
, t1
, 4);
2827 tcg_gen_shr_i64(t0
, t0
, t1
);
2828 tcg_gen_ext16u_i64(t0
, t0
);
2833 tcg_gen_add_i64(t0
, t0
, t1
);
2834 tcg_gen_ext32s_i64(t0
, t0
);
2838 tcg_gen_sub_i64(t0
, t0
, t1
);
2839 tcg_gen_ext32s_i64(t0
, t0
);
2868 /* Make sure shift count isn't TCG undefined behaviour. */
2869 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
2874 tcg_gen_shl_i64(t0
, t0
, t1
);
2878 /* Since SRA is UndefinedResult without sign-extended inputs,
2879 we can treat SRA and DSRA the same. */
2880 tcg_gen_sar_i64(t0
, t0
, t1
);
2883 /* We want to shift in zeros for SRL; zero-extend first. */
2884 tcg_gen_ext32u_i64(t0
, t0
);
2887 tcg_gen_shr_i64(t0
, t0
, t1
);
2891 if (shift_max
== 32) {
2892 tcg_gen_ext32s_i64(t0
, t0
);
2895 /* Shifts larger than MAX produce zero. */
2896 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
2897 tcg_gen_neg_i64(t1
, t1
);
2898 tcg_gen_and_i64(t0
, t0
, t1
);
2904 TCGv_i64 t2
= tcg_temp_new_i64();
2905 int lab
= gen_new_label();
2907 tcg_gen_mov_i64(t2
, t0
);
2908 tcg_gen_add_i64(t0
, t1
, t2
);
2909 if (opc
== OPC_ADD_CP2
) {
2910 tcg_gen_ext32s_i64(t0
, t0
);
2912 tcg_gen_xor_i64(t1
, t1
, t2
);
2913 tcg_gen_xor_i64(t2
, t2
, t0
);
2914 tcg_gen_andc_i64(t1
, t2
, t1
);
2915 tcg_temp_free_i64(t2
);
2916 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
2917 generate_exception(ctx
, EXCP_OVERFLOW
);
2920 opn
= (opc
== OPC_ADD_CP2
? "add" : "dadd");
2927 TCGv_i64 t2
= tcg_temp_new_i64();
2928 int lab
= gen_new_label();
2930 tcg_gen_mov_i64(t2
, t0
);
2931 tcg_gen_sub_i64(t0
, t1
, t2
);
2932 if (opc
== OPC_SUB_CP2
) {
2933 tcg_gen_ext32s_i64(t0
, t0
);
2935 tcg_gen_xor_i64(t1
, t1
, t2
);
2936 tcg_gen_xor_i64(t2
, t2
, t0
);
2937 tcg_gen_and_i64(t1
, t1
, t2
);
2938 tcg_temp_free_i64(t2
);
2939 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
2940 generate_exception(ctx
, EXCP_OVERFLOW
);
2943 opn
= (opc
== OPC_SUB_CP2
? "sub" : "dsub");
2948 tcg_gen_ext32u_i64(t0
, t0
);
2949 tcg_gen_ext32u_i64(t1
, t1
);
2950 tcg_gen_mul_i64(t0
, t0
, t1
);
2960 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
2961 FD field is the CC field? */
2964 generate_exception(ctx
, EXCP_RI
);
2971 gen_store_fpr64(ctx
, t0
, rd
);
2973 (void)opn
; /* avoid a compiler warning */
2974 MIPS_DEBUG("%s %s, %s, %s", opn
,
2975 fregnames
[rd
], fregnames
[rs
], fregnames
[rt
]);
2976 tcg_temp_free_i64(t0
);
2977 tcg_temp_free_i64(t1
);
2981 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
2982 int rs
, int rt
, int16_t imm
)
2985 TCGv t0
= tcg_temp_new();
2986 TCGv t1
= tcg_temp_new();
2989 /* Load needed operands */
2997 /* Compare two registers */
2999 gen_load_gpr(t0
, rs
);
3000 gen_load_gpr(t1
, rt
);
3010 /* Compare register to immediate */
3011 if (rs
!= 0 || imm
!= 0) {
3012 gen_load_gpr(t0
, rs
);
3013 tcg_gen_movi_tl(t1
, (int32_t)imm
);
3020 case OPC_TEQ
: /* rs == rs */
3021 case OPC_TEQI
: /* r0 == 0 */
3022 case OPC_TGE
: /* rs >= rs */
3023 case OPC_TGEI
: /* r0 >= 0 */
3024 case OPC_TGEU
: /* rs >= rs unsigned */
3025 case OPC_TGEIU
: /* r0 >= 0 unsigned */
3027 generate_exception(ctx
, EXCP_TRAP
);
3029 case OPC_TLT
: /* rs < rs */
3030 case OPC_TLTI
: /* r0 < 0 */
3031 case OPC_TLTU
: /* rs < rs unsigned */
3032 case OPC_TLTIU
: /* r0 < 0 unsigned */
3033 case OPC_TNE
: /* rs != rs */
3034 case OPC_TNEI
: /* r0 != 0 */
3035 /* Never trap: treat as NOP. */
3039 int l1
= gen_new_label();
3044 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
3048 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
3052 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
3056 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
3060 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
3064 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
3067 generate_exception(ctx
, EXCP_TRAP
);
3074 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
3076 TranslationBlock
*tb
;
3078 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
3079 likely(!ctx
->singlestep_enabled
)) {
3082 tcg_gen_exit_tb((tcg_target_long
)tb
+ n
);
3085 if (ctx
->singlestep_enabled
) {
3086 save_cpu_state(ctx
, 0);
3087 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
3093 /* Branches (before delay slot) */
3094 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
3096 int rs
, int rt
, int32_t offset
)
3098 target_ulong btgt
= -1;
3100 int bcond_compute
= 0;
3101 TCGv t0
= tcg_temp_new();
3102 TCGv t1
= tcg_temp_new();
3104 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3105 #ifdef MIPS_DEBUG_DISAS
3106 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx
"\n", ctx
->pc
);
3108 generate_exception(ctx
, EXCP_RI
);
3112 /* Load needed operands */
3118 /* Compare two registers */
3120 gen_load_gpr(t0
, rs
);
3121 gen_load_gpr(t1
, rt
);
3124 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3140 /* Compare to zero */
3142 gen_load_gpr(t0
, rs
);
3145 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3152 /* Jump to immediate */
3153 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
3159 /* Jump to register */
3160 if (offset
!= 0 && offset
!= 16) {
3161 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
3162 others are reserved. */
3163 MIPS_INVAL("jump hint");
3164 generate_exception(ctx
, EXCP_RI
);
3167 gen_load_gpr(btarget
, rs
);
3170 MIPS_INVAL("branch/jump");
3171 generate_exception(ctx
, EXCP_RI
);
3174 if (bcond_compute
== 0) {
3175 /* No condition to be computed */
3177 case OPC_BEQ
: /* rx == rx */
3178 case OPC_BEQL
: /* rx == rx likely */
3179 case OPC_BGEZ
: /* 0 >= 0 */
3180 case OPC_BGEZL
: /* 0 >= 0 likely */
3181 case OPC_BLEZ
: /* 0 <= 0 */
3182 case OPC_BLEZL
: /* 0 <= 0 likely */
3184 ctx
->hflags
|= MIPS_HFLAG_B
;
3185 MIPS_DEBUG("balways");
3188 case OPC_BGEZAL
: /* 0 >= 0 */
3189 case OPC_BGEZALL
: /* 0 >= 0 likely */
3190 ctx
->hflags
|= (opc
== OPC_BGEZALS
3192 : MIPS_HFLAG_BDS32
);
3193 /* Always take and link */
3195 ctx
->hflags
|= MIPS_HFLAG_B
;
3196 MIPS_DEBUG("balways and link");
3198 case OPC_BNE
: /* rx != rx */
3199 case OPC_BGTZ
: /* 0 > 0 */
3200 case OPC_BLTZ
: /* 0 < 0 */
3202 MIPS_DEBUG("bnever (NOP)");
3205 case OPC_BLTZAL
: /* 0 < 0 */
3206 ctx
->hflags
|= (opc
== OPC_BLTZALS
3208 : MIPS_HFLAG_BDS32
);
3209 /* Handle as an unconditional branch to get correct delay
3212 btgt
= ctx
->pc
+ (opc
== OPC_BLTZALS
? 6 : 8);
3213 ctx
->hflags
|= MIPS_HFLAG_B
;
3214 MIPS_DEBUG("bnever and link");
3216 case OPC_BLTZALL
: /* 0 < 0 likely */
3217 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
3218 /* Skip the instruction in the delay slot */
3219 MIPS_DEBUG("bnever, link and skip");
3222 case OPC_BNEL
: /* rx != rx likely */
3223 case OPC_BGTZL
: /* 0 > 0 likely */
3224 case OPC_BLTZL
: /* 0 < 0 likely */
3225 /* Skip the instruction in the delay slot */
3226 MIPS_DEBUG("bnever and skip");
3230 ctx
->hflags
|= MIPS_HFLAG_B
;
3231 MIPS_DEBUG("j " TARGET_FMT_lx
, btgt
);
3235 ctx
->hflags
|= MIPS_HFLAG_BX
;
3240 ctx
->hflags
|= MIPS_HFLAG_B
;
3241 ctx
->hflags
|= ((opc
== OPC_JALS
|| opc
== OPC_JALXS
)
3243 : MIPS_HFLAG_BDS32
);
3244 MIPS_DEBUG("jal " TARGET_FMT_lx
, btgt
);
3247 ctx
->hflags
|= MIPS_HFLAG_BR
;
3248 if (insn_bytes
== 4)
3249 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
3250 MIPS_DEBUG("jr %s", regnames
[rs
]);
3256 ctx
->hflags
|= MIPS_HFLAG_BR
;
3257 ctx
->hflags
|= (opc
== OPC_JALRS
3259 : MIPS_HFLAG_BDS32
);
3260 MIPS_DEBUG("jalr %s, %s", regnames
[rt
], regnames
[rs
]);
3263 MIPS_INVAL("branch/jump");
3264 generate_exception(ctx
, EXCP_RI
);
3270 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
3271 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx
,
3272 regnames
[rs
], regnames
[rt
], btgt
);
3275 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
3276 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx
,
3277 regnames
[rs
], regnames
[rt
], btgt
);
3280 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
3281 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx
,
3282 regnames
[rs
], regnames
[rt
], btgt
);
3285 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
3286 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx
,
3287 regnames
[rs
], regnames
[rt
], btgt
);
3290 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3291 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3294 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3295 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3299 ctx
->hflags
|= (opc
== OPC_BGEZALS
3301 : MIPS_HFLAG_BDS32
);
3302 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3303 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3307 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3309 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3312 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
3313 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3316 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
3317 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3320 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
3321 MIPS_DEBUG("blez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3324 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
3325 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3328 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3329 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3332 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3333 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3337 ctx
->hflags
|= (opc
== OPC_BLTZALS
3339 : MIPS_HFLAG_BDS32
);
3340 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3342 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3344 ctx
->hflags
|= MIPS_HFLAG_BC
;
3347 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3349 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3351 ctx
->hflags
|= MIPS_HFLAG_BL
;
3354 MIPS_INVAL("conditional branch/jump");
3355 generate_exception(ctx
, EXCP_RI
);
3359 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx
,
3360 blink
, ctx
->hflags
, btgt
);
3362 ctx
->btarget
= btgt
;
3364 int post_delay
= insn_bytes
;
3365 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
3367 if (opc
!= OPC_JALRC
)
3368 post_delay
+= ((ctx
->hflags
& MIPS_HFLAG_BDS16
) ? 2 : 4);
3370 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
3374 if (insn_bytes
== 2)
3375 ctx
->hflags
|= MIPS_HFLAG_B16
;
3380 /* special3 bitfield operations */
3381 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
3382 int rs
, int lsb
, int msb
)
3384 TCGv t0
= tcg_temp_new();
3385 TCGv t1
= tcg_temp_new();
3388 gen_load_gpr(t1
, rs
);
3393 tcg_gen_shri_tl(t0
, t1
, lsb
);
3395 tcg_gen_andi_tl(t0
, t0
, (1 << (msb
+ 1)) - 1);
3397 tcg_gen_ext32s_tl(t0
, t0
);
3400 #if defined(TARGET_MIPS64)
3402 tcg_gen_shri_tl(t0
, t1
, lsb
);
3404 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1 + 32)) - 1);
3408 tcg_gen_shri_tl(t0
, t1
, lsb
+ 32);
3409 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
3412 tcg_gen_shri_tl(t0
, t1
, lsb
);
3413 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
3419 mask
= ((msb
- lsb
+ 1 < 32) ? ((1 << (msb
- lsb
+ 1)) - 1) : ~0) << lsb
;
3420 gen_load_gpr(t0
, rt
);
3421 tcg_gen_andi_tl(t0
, t0
, ~mask
);
3422 tcg_gen_shli_tl(t1
, t1
, lsb
);
3423 tcg_gen_andi_tl(t1
, t1
, mask
);
3424 tcg_gen_or_tl(t0
, t0
, t1
);
3425 tcg_gen_ext32s_tl(t0
, t0
);
3427 #if defined(TARGET_MIPS64)
3431 mask
= ((msb
- lsb
+ 1 + 32 < 64) ? ((1ULL << (msb
- lsb
+ 1 + 32)) - 1) : ~0ULL) << lsb
;
3432 gen_load_gpr(t0
, rt
);
3433 tcg_gen_andi_tl(t0
, t0
, ~mask
);
3434 tcg_gen_shli_tl(t1
, t1
, lsb
);
3435 tcg_gen_andi_tl(t1
, t1
, mask
);
3436 tcg_gen_or_tl(t0
, t0
, t1
);
3441 mask
= ((1ULL << (msb
- lsb
+ 1)) - 1) << (lsb
+ 32);
3442 gen_load_gpr(t0
, rt
);
3443 tcg_gen_andi_tl(t0
, t0
, ~mask
);
3444 tcg_gen_shli_tl(t1
, t1
, lsb
+ 32);
3445 tcg_gen_andi_tl(t1
, t1
, mask
);
3446 tcg_gen_or_tl(t0
, t0
, t1
);
3451 gen_load_gpr(t0
, rt
);
3452 mask
= ((1ULL << (msb
- lsb
+ 1)) - 1) << lsb
;
3453 gen_load_gpr(t0
, rt
);
3454 tcg_gen_andi_tl(t0
, t0
, ~mask
);
3455 tcg_gen_shli_tl(t1
, t1
, lsb
);
3456 tcg_gen_andi_tl(t1
, t1
, mask
);
3457 tcg_gen_or_tl(t0
, t0
, t1
);
3462 MIPS_INVAL("bitops");
3463 generate_exception(ctx
, EXCP_RI
);
3468 gen_store_gpr(t0
, rt
);
3473 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
3478 /* If no destination, treat it as a NOP. */
3483 t0
= tcg_temp_new();
3484 gen_load_gpr(t0
, rt
);
3488 TCGv t1
= tcg_temp_new();
3490 tcg_gen_shri_tl(t1
, t0
, 8);
3491 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
3492 tcg_gen_shli_tl(t0
, t0
, 8);
3493 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
3494 tcg_gen_or_tl(t0
, t0
, t1
);
3496 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3500 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
3503 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
3505 #if defined(TARGET_MIPS64)
3508 TCGv t1
= tcg_temp_new();
3510 tcg_gen_shri_tl(t1
, t0
, 8);
3511 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
3512 tcg_gen_shli_tl(t0
, t0
, 8);
3513 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
3514 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
3520 TCGv t1
= tcg_temp_new();
3522 tcg_gen_shri_tl(t1
, t0
, 16);
3523 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
3524 tcg_gen_shli_tl(t0
, t0
, 16);
3525 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
3526 tcg_gen_or_tl(t0
, t0
, t1
);
3527 tcg_gen_shri_tl(t1
, t0
, 32);
3528 tcg_gen_shli_tl(t0
, t0
, 32);
3529 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
3535 MIPS_INVAL("bsfhl");
3536 generate_exception(ctx
, EXCP_RI
);
3543 #ifndef CONFIG_USER_ONLY
3544 /* CP0 (MMU and control) */
3545 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
3547 TCGv_i32 t0
= tcg_temp_new_i32();
3549 tcg_gen_ld_i32(t0
, cpu_env
, off
);
3550 tcg_gen_ext_i32_tl(arg
, t0
);
3551 tcg_temp_free_i32(t0
);
3554 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
3556 tcg_gen_ld_tl(arg
, cpu_env
, off
);
3557 tcg_gen_ext32s_tl(arg
, arg
);
3560 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
3562 TCGv_i32 t0
= tcg_temp_new_i32();
3564 tcg_gen_trunc_tl_i32(t0
, arg
);
3565 tcg_gen_st_i32(t0
, cpu_env
, off
);
3566 tcg_temp_free_i32(t0
);
3569 static inline void gen_mtc0_store64 (TCGv arg
, target_ulong off
)
3571 tcg_gen_ext32s_tl(arg
, arg
);
3572 tcg_gen_st_tl(arg
, cpu_env
, off
);
3575 static void gen_mfc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
3577 const char *rn
= "invalid";
3580 check_insn(env
, ctx
, ISA_MIPS32
);
3586 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
3590 check_insn(env
, ctx
, ASE_MT
);
3591 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
3595 check_insn(env
, ctx
, ASE_MT
);
3596 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
3600 check_insn(env
, ctx
, ASE_MT
);
3601 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
3611 gen_helper_mfc0_random(arg
, cpu_env
);
3615 check_insn(env
, ctx
, ASE_MT
);
3616 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
3620 check_insn(env
, ctx
, ASE_MT
);
3621 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
3625 check_insn(env
, ctx
, ASE_MT
);
3626 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
3630 check_insn(env
, ctx
, ASE_MT
);
3631 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
3635 check_insn(env
, ctx
, ASE_MT
);
3636 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
3640 check_insn(env
, ctx
, ASE_MT
);
3641 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
3642 rn
= "VPEScheFBack";
3645 check_insn(env
, ctx
, ASE_MT
);
3646 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
3656 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
3657 tcg_gen_ext32s_tl(arg
, arg
);
3661 check_insn(env
, ctx
, ASE_MT
);
3662 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
3666 check_insn(env
, ctx
, ASE_MT
);
3667 gen_helper_mfc0_tcbind(arg
, cpu_env
);
3671 check_insn(env
, ctx
, ASE_MT
);
3672 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
3676 check_insn(env
, ctx
, ASE_MT
);
3677 gen_helper_mfc0_tchalt(arg
, cpu_env
);
3681 check_insn(env
, ctx
, ASE_MT
);
3682 gen_helper_mfc0_tccontext(arg
, cpu_env
);
3686 check_insn(env
, ctx
, ASE_MT
);
3687 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
3691 check_insn(env
, ctx
, ASE_MT
);
3692 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
3702 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
3703 tcg_gen_ext32s_tl(arg
, arg
);
3713 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
3714 tcg_gen_ext32s_tl(arg
, arg
);
3718 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
3719 rn
= "ContextConfig";
3728 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
3732 check_insn(env
, ctx
, ISA_MIPS32R2
);
3733 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
3743 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
3747 check_insn(env
, ctx
, ISA_MIPS32R2
);
3748 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
3752 check_insn(env
, ctx
, ISA_MIPS32R2
);
3753 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
3757 check_insn(env
, ctx
, ISA_MIPS32R2
);
3758 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
3762 check_insn(env
, ctx
, ISA_MIPS32R2
);
3763 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
3767 check_insn(env
, ctx
, ISA_MIPS32R2
);
3768 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
3778 check_insn(env
, ctx
, ISA_MIPS32R2
);
3779 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
3789 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
3790 tcg_gen_ext32s_tl(arg
, arg
);
3800 /* Mark as an IO operation because we read the time. */
3803 gen_helper_mfc0_count(arg
, cpu_env
);
3807 /* Break the TB to be able to take timer interrupts immediately
3808 after reading count. */
3809 ctx
->bstate
= BS_STOP
;
3812 /* 6,7 are implementation dependent */
3820 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
3821 tcg_gen_ext32s_tl(arg
, arg
);
3831 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
3834 /* 6,7 are implementation dependent */
3842 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
3846 check_insn(env
, ctx
, ISA_MIPS32R2
);
3847 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
3851 check_insn(env
, ctx
, ISA_MIPS32R2
);
3852 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
3856 check_insn(env
, ctx
, ISA_MIPS32R2
);
3857 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
3867 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
3877 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
3878 tcg_gen_ext32s_tl(arg
, arg
);
3888 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
3892 check_insn(env
, ctx
, ISA_MIPS32R2
);
3893 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
3903 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
3907 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
3911 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
3915 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
3918 /* 4,5 are reserved */
3919 /* 6,7 are implementation dependent */
3921 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
3925 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
3935 gen_helper_mfc0_lladdr(arg
, cpu_env
);
3945 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
3955 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
3965 #if defined(TARGET_MIPS64)
3966 check_insn(env
, ctx
, ISA_MIPS3
);
3967 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
3968 tcg_gen_ext32s_tl(arg
, arg
);
3977 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3980 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
3988 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
3989 rn
= "'Diagnostic"; /* implementation dependent */
3994 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
3998 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
3999 rn
= "TraceControl";
4002 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
4003 rn
= "TraceControl2";
4006 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
4007 rn
= "UserTraceData";
4010 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
4021 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
4022 tcg_gen_ext32s_tl(arg
, arg
);
4032 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
4033 rn
= "Performance0";
4036 // gen_helper_mfc0_performance1(arg);
4037 rn
= "Performance1";
4040 // gen_helper_mfc0_performance2(arg);
4041 rn
= "Performance2";
4044 // gen_helper_mfc0_performance3(arg);
4045 rn
= "Performance3";
4048 // gen_helper_mfc0_performance4(arg);
4049 rn
= "Performance4";
4052 // gen_helper_mfc0_performance5(arg);
4053 rn
= "Performance5";
4056 // gen_helper_mfc0_performance6(arg);
4057 rn
= "Performance6";
4060 // gen_helper_mfc0_performance7(arg);
4061 rn
= "Performance7";
4068 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4074 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4087 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4094 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
4107 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
4114 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
4124 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4125 tcg_gen_ext32s_tl(arg
, arg
);
4136 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4146 (void)rn
; /* avoid a compiler warning */
4147 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4151 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4152 generate_exception(ctx
, EXCP_RI
);
4155 static void gen_mtc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4157 const char *rn
= "invalid";
4160 check_insn(env
, ctx
, ISA_MIPS32
);
4169 gen_helper_mtc0_index(cpu_env
, arg
);
4173 check_insn(env
, ctx
, ASE_MT
);
4174 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
4178 check_insn(env
, ctx
, ASE_MT
);
4183 check_insn(env
, ctx
, ASE_MT
);
4198 check_insn(env
, ctx
, ASE_MT
);
4199 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
4203 check_insn(env
, ctx
, ASE_MT
);
4204 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
4208 check_insn(env
, ctx
, ASE_MT
);
4209 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
4213 check_insn(env
, ctx
, ASE_MT
);
4214 gen_helper_mtc0_yqmask(cpu_env
, arg
);
4218 check_insn(env
, ctx
, ASE_MT
);
4219 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4223 check_insn(env
, ctx
, ASE_MT
);
4224 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4225 rn
= "VPEScheFBack";
4228 check_insn(env
, ctx
, ASE_MT
);
4229 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
4239 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
4243 check_insn(env
, ctx
, ASE_MT
);
4244 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
4248 check_insn(env
, ctx
, ASE_MT
);
4249 gen_helper_mtc0_tcbind(cpu_env
, arg
);
4253 check_insn(env
, ctx
, ASE_MT
);
4254 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
4258 check_insn(env
, ctx
, ASE_MT
);
4259 gen_helper_mtc0_tchalt(cpu_env
, arg
);
4263 check_insn(env
, ctx
, ASE_MT
);
4264 gen_helper_mtc0_tccontext(cpu_env
, arg
);
4268 check_insn(env
, ctx
, ASE_MT
);
4269 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
4273 check_insn(env
, ctx
, ASE_MT
);
4274 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
4284 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
4294 gen_helper_mtc0_context(cpu_env
, arg
);
4298 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
4299 rn
= "ContextConfig";
4308 gen_helper_mtc0_pagemask(cpu_env
, arg
);
4312 check_insn(env
, ctx
, ISA_MIPS32R2
);
4313 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
4323 gen_helper_mtc0_wired(cpu_env
, arg
);
4327 check_insn(env
, ctx
, ISA_MIPS32R2
);
4328 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
4332 check_insn(env
, ctx
, ISA_MIPS32R2
);
4333 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
4337 check_insn(env
, ctx
, ISA_MIPS32R2
);
4338 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
4342 check_insn(env
, ctx
, ISA_MIPS32R2
);
4343 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
4347 check_insn(env
, ctx
, ISA_MIPS32R2
);
4348 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
4358 check_insn(env
, ctx
, ISA_MIPS32R2
);
4359 gen_helper_mtc0_hwrena(cpu_env
, arg
);
4373 gen_helper_mtc0_count(cpu_env
, arg
);
4376 /* 6,7 are implementation dependent */
4384 gen_helper_mtc0_entryhi(cpu_env
, arg
);
4394 gen_helper_mtc0_compare(cpu_env
, arg
);
4397 /* 6,7 are implementation dependent */
4405 save_cpu_state(ctx
, 1);
4406 gen_helper_mtc0_status(cpu_env
, arg
);
4407 /* BS_STOP isn't good enough here, hflags may have changed. */
4408 gen_save_pc(ctx
->pc
+ 4);
4409 ctx
->bstate
= BS_EXCP
;
4413 check_insn(env
, ctx
, ISA_MIPS32R2
);
4414 gen_helper_mtc0_intctl(cpu_env
, arg
);
4415 /* Stop translation as we may have switched the execution mode */
4416 ctx
->bstate
= BS_STOP
;
4420 check_insn(env
, ctx
, ISA_MIPS32R2
);
4421 gen_helper_mtc0_srsctl(cpu_env
, arg
);
4422 /* Stop translation as we may have switched the execution mode */
4423 ctx
->bstate
= BS_STOP
;
4427 check_insn(env
, ctx
, ISA_MIPS32R2
);
4428 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
4429 /* Stop translation as we may have switched the execution mode */
4430 ctx
->bstate
= BS_STOP
;
4440 save_cpu_state(ctx
, 1);
4441 gen_helper_mtc0_cause(cpu_env
, arg
);
4451 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_EPC
));
4465 check_insn(env
, ctx
, ISA_MIPS32R2
);
4466 gen_helper_mtc0_ebase(cpu_env
, arg
);
4476 gen_helper_mtc0_config0(cpu_env
, arg
);
4478 /* Stop translation as we may have switched the execution mode */
4479 ctx
->bstate
= BS_STOP
;
4482 /* ignored, read only */
4486 gen_helper_mtc0_config2(cpu_env
, arg
);
4488 /* Stop translation as we may have switched the execution mode */
4489 ctx
->bstate
= BS_STOP
;
4492 /* ignored, read only */
4495 /* 4,5 are reserved */
4496 /* 6,7 are implementation dependent */
4506 rn
= "Invalid config selector";
4513 gen_helper_mtc0_lladdr(cpu_env
, arg
);
4523 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
4533 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
4543 #if defined(TARGET_MIPS64)
4544 check_insn(env
, ctx
, ISA_MIPS3
);
4545 gen_helper_mtc0_xcontext(cpu_env
, arg
);
4554 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4557 gen_helper_mtc0_framemask(cpu_env
, arg
);
4566 rn
= "Diagnostic"; /* implementation dependent */
4571 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
4572 /* BS_STOP isn't good enough here, hflags may have changed. */
4573 gen_save_pc(ctx
->pc
+ 4);
4574 ctx
->bstate
= BS_EXCP
;
4578 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
4579 rn
= "TraceControl";
4580 /* Stop translation as we may have switched the execution mode */
4581 ctx
->bstate
= BS_STOP
;
4584 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
4585 rn
= "TraceControl2";
4586 /* Stop translation as we may have switched the execution mode */
4587 ctx
->bstate
= BS_STOP
;
4590 /* Stop translation as we may have switched the execution mode */
4591 ctx
->bstate
= BS_STOP
;
4592 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
4593 rn
= "UserTraceData";
4594 /* Stop translation as we may have switched the execution mode */
4595 ctx
->bstate
= BS_STOP
;
4598 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
4599 /* Stop translation as we may have switched the execution mode */
4600 ctx
->bstate
= BS_STOP
;
4611 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_DEPC
));
4621 gen_helper_mtc0_performance0(cpu_env
, arg
);
4622 rn
= "Performance0";
4625 // gen_helper_mtc0_performance1(arg);
4626 rn
= "Performance1";
4629 // gen_helper_mtc0_performance2(arg);
4630 rn
= "Performance2";
4633 // gen_helper_mtc0_performance3(arg);
4634 rn
= "Performance3";
4637 // gen_helper_mtc0_performance4(arg);
4638 rn
= "Performance4";
4641 // gen_helper_mtc0_performance5(arg);
4642 rn
= "Performance5";
4645 // gen_helper_mtc0_performance6(arg);
4646 rn
= "Performance6";
4649 // gen_helper_mtc0_performance7(arg);
4650 rn
= "Performance7";
4676 gen_helper_mtc0_taglo(cpu_env
, arg
);
4683 gen_helper_mtc0_datalo(cpu_env
, arg
);
4696 gen_helper_mtc0_taghi(cpu_env
, arg
);
4703 gen_helper_mtc0_datahi(cpu_env
, arg
);
4714 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4725 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4731 /* Stop translation as we may have switched the execution mode */
4732 ctx
->bstate
= BS_STOP
;
4737 (void)rn
; /* avoid a compiler warning */
4738 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4739 /* For simplicity assume that all writes can cause interrupts. */
4742 ctx
->bstate
= BS_STOP
;
4747 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4748 generate_exception(ctx
, EXCP_RI
);
4751 #if defined(TARGET_MIPS64)
4752 static void gen_dmfc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4754 const char *rn
= "invalid";
4757 check_insn(env
, ctx
, ISA_MIPS64
);
4763 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4767 check_insn(env
, ctx
, ASE_MT
);
4768 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4772 check_insn(env
, ctx
, ASE_MT
);
4773 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4777 check_insn(env
, ctx
, ASE_MT
);
4778 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4788 gen_helper_mfc0_random(arg
, cpu_env
);
4792 check_insn(env
, ctx
, ASE_MT
);
4793 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4797 check_insn(env
, ctx
, ASE_MT
);
4798 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4802 check_insn(env
, ctx
, ASE_MT
);
4803 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4807 check_insn(env
, ctx
, ASE_MT
);
4808 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
4812 check_insn(env
, ctx
, ASE_MT
);
4813 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4817 check_insn(env
, ctx
, ASE_MT
);
4818 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4819 rn
= "VPEScheFBack";
4822 check_insn(env
, ctx
, ASE_MT
);
4823 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4833 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4837 check_insn(env
, ctx
, ASE_MT
);
4838 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
4842 check_insn(env
, ctx
, ASE_MT
);
4843 gen_helper_mfc0_tcbind(arg
, cpu_env
);
4847 check_insn(env
, ctx
, ASE_MT
);
4848 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
4852 check_insn(env
, ctx
, ASE_MT
);
4853 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
4857 check_insn(env
, ctx
, ASE_MT
);
4858 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
4862 check_insn(env
, ctx
, ASE_MT
);
4863 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
4867 check_insn(env
, ctx
, ASE_MT
);
4868 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
4878 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4888 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
4892 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
4893 rn
= "ContextConfig";
4902 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
4906 check_insn(env
, ctx
, ISA_MIPS32R2
);
4907 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
4917 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
4921 check_insn(env
, ctx
, ISA_MIPS32R2
);
4922 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
4926 check_insn(env
, ctx
, ISA_MIPS32R2
);
4927 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
4931 check_insn(env
, ctx
, ISA_MIPS32R2
);
4932 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
4936 check_insn(env
, ctx
, ISA_MIPS32R2
);
4937 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
4941 check_insn(env
, ctx
, ISA_MIPS32R2
);
4942 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
4952 check_insn(env
, ctx
, ISA_MIPS32R2
);
4953 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
4963 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
4973 /* Mark as an IO operation because we read the time. */
4976 gen_helper_mfc0_count(arg
, cpu_env
);
4980 /* Break the TB to be able to take timer interrupts immediately
4981 after reading count. */
4982 ctx
->bstate
= BS_STOP
;
4985 /* 6,7 are implementation dependent */
4993 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5003 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5006 /* 6,7 are implementation dependent */
5014 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5018 check_insn(env
, ctx
, ISA_MIPS32R2
);
5019 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5023 check_insn(env
, ctx
, ISA_MIPS32R2
);
5024 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5028 check_insn(env
, ctx
, ISA_MIPS32R2
);
5029 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5039 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5049 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5059 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5063 check_insn(env
, ctx
, ISA_MIPS32R2
);
5064 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5074 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5078 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5082 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5086 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5089 /* 6,7 are implementation dependent */
5091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5095 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5105 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
5115 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
5125 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5135 check_insn(env
, ctx
, ISA_MIPS3
);
5136 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5144 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5147 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5155 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5156 rn
= "'Diagnostic"; /* implementation dependent */
5161 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5165 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
5166 rn
= "TraceControl";
5169 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
5170 rn
= "TraceControl2";
5173 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
5174 rn
= "UserTraceData";
5177 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
5188 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5198 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5199 rn
= "Performance0";
5202 // gen_helper_dmfc0_performance1(arg);
5203 rn
= "Performance1";
5206 // gen_helper_dmfc0_performance2(arg);
5207 rn
= "Performance2";
5210 // gen_helper_dmfc0_performance3(arg);
5211 rn
= "Performance3";
5214 // gen_helper_dmfc0_performance4(arg);
5215 rn
= "Performance4";
5218 // gen_helper_dmfc0_performance5(arg);
5219 rn
= "Performance5";
5222 // gen_helper_dmfc0_performance6(arg);
5223 rn
= "Performance6";
5226 // gen_helper_dmfc0_performance7(arg);
5227 rn
= "Performance7";
5234 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5241 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5254 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5261 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5274 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5281 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5291 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5302 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5312 (void)rn
; /* avoid a compiler warning */
5313 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5317 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5318 generate_exception(ctx
, EXCP_RI
);
5321 static void gen_dmtc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5323 const char *rn
= "invalid";
5326 check_insn(env
, ctx
, ISA_MIPS64
);
5335 gen_helper_mtc0_index(cpu_env
, arg
);
5339 check_insn(env
, ctx
, ASE_MT
);
5340 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5344 check_insn(env
, ctx
, ASE_MT
);
5349 check_insn(env
, ctx
, ASE_MT
);
5364 check_insn(env
, ctx
, ASE_MT
);
5365 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5369 check_insn(env
, ctx
, ASE_MT
);
5370 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5374 check_insn(env
, ctx
, ASE_MT
);
5375 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5379 check_insn(env
, ctx
, ASE_MT
);
5380 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5384 check_insn(env
, ctx
, ASE_MT
);
5385 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5389 check_insn(env
, ctx
, ASE_MT
);
5390 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5391 rn
= "VPEScheFBack";
5394 check_insn(env
, ctx
, ASE_MT
);
5395 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5405 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5409 check_insn(env
, ctx
, ASE_MT
);
5410 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5414 check_insn(env
, ctx
, ASE_MT
);
5415 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5419 check_insn(env
, ctx
, ASE_MT
);
5420 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5424 check_insn(env
, ctx
, ASE_MT
);
5425 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5429 check_insn(env
, ctx
, ASE_MT
);
5430 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5434 check_insn(env
, ctx
, ASE_MT
);
5435 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5439 check_insn(env
, ctx
, ASE_MT
);
5440 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5450 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5460 gen_helper_mtc0_context(cpu_env
, arg
);
5464 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5465 rn
= "ContextConfig";
5474 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5478 check_insn(env
, ctx
, ISA_MIPS32R2
);
5479 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5489 gen_helper_mtc0_wired(cpu_env
, arg
);
5493 check_insn(env
, ctx
, ISA_MIPS32R2
);
5494 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5498 check_insn(env
, ctx
, ISA_MIPS32R2
);
5499 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5503 check_insn(env
, ctx
, ISA_MIPS32R2
);
5504 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5508 check_insn(env
, ctx
, ISA_MIPS32R2
);
5509 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5513 check_insn(env
, ctx
, ISA_MIPS32R2
);
5514 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5524 check_insn(env
, ctx
, ISA_MIPS32R2
);
5525 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5539 gen_helper_mtc0_count(cpu_env
, arg
);
5542 /* 6,7 are implementation dependent */
5546 /* Stop translation as we may have switched the execution mode */
5547 ctx
->bstate
= BS_STOP
;
5552 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5562 gen_helper_mtc0_compare(cpu_env
, arg
);
5565 /* 6,7 are implementation dependent */
5569 /* Stop translation as we may have switched the execution mode */
5570 ctx
->bstate
= BS_STOP
;
5575 save_cpu_state(ctx
, 1);
5576 gen_helper_mtc0_status(cpu_env
, arg
);
5577 /* BS_STOP isn't good enough here, hflags may have changed. */
5578 gen_save_pc(ctx
->pc
+ 4);
5579 ctx
->bstate
= BS_EXCP
;
5583 check_insn(env
, ctx
, ISA_MIPS32R2
);
5584 gen_helper_mtc0_intctl(cpu_env
, arg
);
5585 /* Stop translation as we may have switched the execution mode */
5586 ctx
->bstate
= BS_STOP
;
5590 check_insn(env
, ctx
, ISA_MIPS32R2
);
5591 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5592 /* Stop translation as we may have switched the execution mode */
5593 ctx
->bstate
= BS_STOP
;
5597 check_insn(env
, ctx
, ISA_MIPS32R2
);
5598 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5599 /* Stop translation as we may have switched the execution mode */
5600 ctx
->bstate
= BS_STOP
;
5610 save_cpu_state(ctx
, 1);
5611 /* Mark as an IO operation because we may trigger a software
5616 gen_helper_mtc0_cause(cpu_env
, arg
);
5620 /* Stop translation as we may have triggered an interrupt */
5621 ctx
->bstate
= BS_STOP
;
5631 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5645 check_insn(env
, ctx
, ISA_MIPS32R2
);
5646 gen_helper_mtc0_ebase(cpu_env
, arg
);
5656 gen_helper_mtc0_config0(cpu_env
, arg
);
5658 /* Stop translation as we may have switched the execution mode */
5659 ctx
->bstate
= BS_STOP
;
5662 /* ignored, read only */
5666 gen_helper_mtc0_config2(cpu_env
, arg
);
5668 /* Stop translation as we may have switched the execution mode */
5669 ctx
->bstate
= BS_STOP
;
5675 /* 6,7 are implementation dependent */
5677 rn
= "Invalid config selector";
5684 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5694 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5704 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5714 check_insn(env
, ctx
, ISA_MIPS3
);
5715 gen_helper_mtc0_xcontext(cpu_env
, arg
);
5723 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5726 gen_helper_mtc0_framemask(cpu_env
, arg
);
5735 rn
= "Diagnostic"; /* implementation dependent */
5740 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
5741 /* BS_STOP isn't good enough here, hflags may have changed. */
5742 gen_save_pc(ctx
->pc
+ 4);
5743 ctx
->bstate
= BS_EXCP
;
5747 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
5748 /* Stop translation as we may have switched the execution mode */
5749 ctx
->bstate
= BS_STOP
;
5750 rn
= "TraceControl";
5753 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
5754 /* Stop translation as we may have switched the execution mode */
5755 ctx
->bstate
= BS_STOP
;
5756 rn
= "TraceControl2";
5759 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
5760 /* Stop translation as we may have switched the execution mode */
5761 ctx
->bstate
= BS_STOP
;
5762 rn
= "UserTraceData";
5765 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
5766 /* Stop translation as we may have switched the execution mode */
5767 ctx
->bstate
= BS_STOP
;
5778 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5788 gen_helper_mtc0_performance0(cpu_env
, arg
);
5789 rn
= "Performance0";
5792 // gen_helper_mtc0_performance1(cpu_env, arg);
5793 rn
= "Performance1";
5796 // gen_helper_mtc0_performance2(cpu_env, arg);
5797 rn
= "Performance2";
5800 // gen_helper_mtc0_performance3(cpu_env, arg);
5801 rn
= "Performance3";
5804 // gen_helper_mtc0_performance4(cpu_env, arg);
5805 rn
= "Performance4";
5808 // gen_helper_mtc0_performance5(cpu_env, arg);
5809 rn
= "Performance5";
5812 // gen_helper_mtc0_performance6(cpu_env, arg);
5813 rn
= "Performance6";
5816 // gen_helper_mtc0_performance7(cpu_env, arg);
5817 rn
= "Performance7";
5843 gen_helper_mtc0_taglo(cpu_env
, arg
);
5850 gen_helper_mtc0_datalo(cpu_env
, arg
);
5863 gen_helper_mtc0_taghi(cpu_env
, arg
);
5870 gen_helper_mtc0_datahi(cpu_env
, arg
);
5881 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5892 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5898 /* Stop translation as we may have switched the execution mode */
5899 ctx
->bstate
= BS_STOP
;
5904 (void)rn
; /* avoid a compiler warning */
5905 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5906 /* For simplicity assume that all writes can cause interrupts. */
5909 ctx
->bstate
= BS_STOP
;
5914 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5915 generate_exception(ctx
, EXCP_RI
);
5917 #endif /* TARGET_MIPS64 */
5919 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
5920 int u
, int sel
, int h
)
5922 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5923 TCGv t0
= tcg_temp_local_new();
5925 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5926 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5927 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5928 tcg_gen_movi_tl(t0
, -1);
5929 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5930 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5931 tcg_gen_movi_tl(t0
, -1);
5937 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
5940 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
5950 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
5953 gen_helper_mftc0_tcbind(t0
, cpu_env
);
5956 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
5959 gen_helper_mftc0_tchalt(t0
, cpu_env
);
5962 gen_helper_mftc0_tccontext(t0
, cpu_env
);
5965 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
5968 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
5971 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5978 gen_helper_mftc0_entryhi(t0
, cpu_env
);
5981 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5987 gen_helper_mftc0_status(t0
, cpu_env
);
5990 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5996 gen_helper_mftc0_cause(t0
, cpu_env
);
6006 gen_helper_mftc0_epc(t0
, cpu_env
);
6016 gen_helper_mftc0_ebase(t0
, cpu_env
);
6026 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
6036 gen_helper_mftc0_debug(t0
, cpu_env
);
6039 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
6044 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
6046 } else switch (sel
) {
6047 /* GPR registers. */
6049 gen_helper_1e0i(mftgpr
, t0
, rt
);
6051 /* Auxiliary CPU registers */
6055 gen_helper_1e0i(mftlo
, t0
, 0);
6058 gen_helper_1e0i(mfthi
, t0
, 0);
6061 gen_helper_1e0i(mftacx
, t0
, 0);
6064 gen_helper_1e0i(mftlo
, t0
, 1);
6067 gen_helper_1e0i(mfthi
, t0
, 1);
6070 gen_helper_1e0i(mftacx
, t0
, 1);
6073 gen_helper_1e0i(mftlo
, t0
, 2);
6076 gen_helper_1e0i(mfthi
, t0
, 2);
6079 gen_helper_1e0i(mftacx
, t0
, 2);
6082 gen_helper_1e0i(mftlo
, t0
, 3);
6085 gen_helper_1e0i(mfthi
, t0
, 3);
6088 gen_helper_1e0i(mftacx
, t0
, 3);
6091 gen_helper_mftdsp(t0
, cpu_env
);
6097 /* Floating point (COP1). */
6099 /* XXX: For now we support only a single FPU context. */
6101 TCGv_i32 fp0
= tcg_temp_new_i32();
6103 gen_load_fpr32(fp0
, rt
);
6104 tcg_gen_ext_i32_tl(t0
, fp0
);
6105 tcg_temp_free_i32(fp0
);
6107 TCGv_i32 fp0
= tcg_temp_new_i32();
6109 gen_load_fpr32h(fp0
, rt
);
6110 tcg_gen_ext_i32_tl(t0
, fp0
);
6111 tcg_temp_free_i32(fp0
);
6115 /* XXX: For now we support only a single FPU context. */
6116 gen_helper_1e0i(cfc1
, t0
, rt
);
6118 /* COP2: Not implemented. */
6125 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
6126 gen_store_gpr(t0
, rd
);
6132 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
6133 generate_exception(ctx
, EXCP_RI
);
6136 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
6137 int u
, int sel
, int h
)
6139 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
6140 TCGv t0
= tcg_temp_local_new();
6142 gen_load_gpr(t0
, rt
);
6143 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
6144 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
6145 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
6147 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
6148 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
6155 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
6158 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
6168 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
6171 gen_helper_mttc0_tcbind(cpu_env
, t0
);
6174 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
6177 gen_helper_mttc0_tchalt(cpu_env
, t0
);
6180 gen_helper_mttc0_tccontext(cpu_env
, t0
);
6183 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
6186 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
6189 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6196 gen_helper_mttc0_entryhi(cpu_env
, t0
);
6199 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6205 gen_helper_mttc0_status(cpu_env
, t0
);
6208 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6214 gen_helper_mttc0_cause(cpu_env
, t0
);
6224 gen_helper_mttc0_ebase(cpu_env
, t0
);
6234 gen_helper_mttc0_debug(cpu_env
, t0
);
6237 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6242 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6244 } else switch (sel
) {
6245 /* GPR registers. */
6247 gen_helper_0e1i(mttgpr
, t0
, rd
);
6249 /* Auxiliary CPU registers */
6253 gen_helper_0e1i(mttlo
, t0
, 0);
6256 gen_helper_0e1i(mtthi
, t0
, 0);
6259 gen_helper_0e1i(mttacx
, t0
, 0);
6262 gen_helper_0e1i(mttlo
, t0
, 1);
6265 gen_helper_0e1i(mtthi
, t0
, 1);
6268 gen_helper_0e1i(mttacx
, t0
, 1);
6271 gen_helper_0e1i(mttlo
, t0
, 2);
6274 gen_helper_0e1i(mtthi
, t0
, 2);
6277 gen_helper_0e1i(mttacx
, t0
, 2);
6280 gen_helper_0e1i(mttlo
, t0
, 3);
6283 gen_helper_0e1i(mtthi
, t0
, 3);
6286 gen_helper_0e1i(mttacx
, t0
, 3);
6289 gen_helper_mttdsp(cpu_env
, t0
);
6295 /* Floating point (COP1). */
6297 /* XXX: For now we support only a single FPU context. */
6299 TCGv_i32 fp0
= tcg_temp_new_i32();
6301 tcg_gen_trunc_tl_i32(fp0
, t0
);
6302 gen_store_fpr32(fp0
, rd
);
6303 tcg_temp_free_i32(fp0
);
6305 TCGv_i32 fp0
= tcg_temp_new_i32();
6307 tcg_gen_trunc_tl_i32(fp0
, t0
);
6308 gen_store_fpr32h(fp0
, rd
);
6309 tcg_temp_free_i32(fp0
);
6313 /* XXX: For now we support only a single FPU context. */
6314 gen_helper_0e1i(ctc1
, t0
, rd
);
6316 /* COP2: Not implemented. */
6323 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
6329 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
6330 generate_exception(ctx
, EXCP_RI
);
6333 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
6335 const char *opn
= "ldst";
6337 check_cp0_enabled(ctx
);
6344 gen_mfc0(env
, ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
6349 TCGv t0
= tcg_temp_new();
6351 gen_load_gpr(t0
, rt
);
6352 gen_mtc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
6357 #if defined(TARGET_MIPS64)
6359 check_insn(env
, ctx
, ISA_MIPS3
);
6364 gen_dmfc0(env
, ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
6368 check_insn(env
, ctx
, ISA_MIPS3
);
6370 TCGv t0
= tcg_temp_new();
6372 gen_load_gpr(t0
, rt
);
6373 gen_dmtc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
6380 check_insn(env
, ctx
, ASE_MT
);
6385 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
6386 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
6390 check_insn(env
, ctx
, ASE_MT
);
6391 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
6392 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
6397 if (!env
->tlb
->helper_tlbwi
)
6399 gen_helper_tlbwi(cpu_env
);
6403 if (!env
->tlb
->helper_tlbwr
)
6405 gen_helper_tlbwr(cpu_env
);
6409 if (!env
->tlb
->helper_tlbp
)
6411 gen_helper_tlbp(cpu_env
);
6415 if (!env
->tlb
->helper_tlbr
)
6417 gen_helper_tlbr(cpu_env
);
6421 check_insn(env
, ctx
, ISA_MIPS2
);
6422 gen_helper_eret(cpu_env
);
6423 ctx
->bstate
= BS_EXCP
;
6427 check_insn(env
, ctx
, ISA_MIPS32
);
6428 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
6430 generate_exception(ctx
, EXCP_RI
);
6432 gen_helper_deret(cpu_env
);
6433 ctx
->bstate
= BS_EXCP
;
6438 check_insn(env
, ctx
, ISA_MIPS3
| ISA_MIPS32
);
6439 /* If we get an exception, we want to restart at next instruction */
6441 save_cpu_state(ctx
, 1);
6443 gen_helper_wait(cpu_env
);
6444 ctx
->bstate
= BS_EXCP
;
6449 generate_exception(ctx
, EXCP_RI
);
6452 (void)opn
; /* avoid a compiler warning */
6453 MIPS_DEBUG("%s %s %d", opn
, regnames
[rt
], rd
);
6455 #endif /* !CONFIG_USER_ONLY */
6457 /* CP1 Branches (before delay slot) */
6458 static void gen_compute_branch1 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op
,
6459 int32_t cc
, int32_t offset
)
6461 target_ulong btarget
;
6462 const char *opn
= "cp1 cond branch";
6463 TCGv_i32 t0
= tcg_temp_new_i32();
6466 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
6468 btarget
= ctx
->pc
+ 4 + offset
;
6472 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6473 tcg_gen_not_i32(t0
, t0
);
6474 tcg_gen_andi_i32(t0
, t0
, 1);
6475 tcg_gen_extu_i32_tl(bcond
, t0
);
6479 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6480 tcg_gen_not_i32(t0
, t0
);
6481 tcg_gen_andi_i32(t0
, t0
, 1);
6482 tcg_gen_extu_i32_tl(bcond
, t0
);
6486 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6487 tcg_gen_andi_i32(t0
, t0
, 1);
6488 tcg_gen_extu_i32_tl(bcond
, t0
);
6492 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6493 tcg_gen_andi_i32(t0
, t0
, 1);
6494 tcg_gen_extu_i32_tl(bcond
, t0
);
6497 ctx
->hflags
|= MIPS_HFLAG_BL
;
6501 TCGv_i32 t1
= tcg_temp_new_i32();
6502 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6503 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6504 tcg_gen_nand_i32(t0
, t0
, t1
);
6505 tcg_temp_free_i32(t1
);
6506 tcg_gen_andi_i32(t0
, t0
, 1);
6507 tcg_gen_extu_i32_tl(bcond
, t0
);
6513 TCGv_i32 t1
= tcg_temp_new_i32();
6514 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6515 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6516 tcg_gen_or_i32(t0
, t0
, t1
);
6517 tcg_temp_free_i32(t1
);
6518 tcg_gen_andi_i32(t0
, t0
, 1);
6519 tcg_gen_extu_i32_tl(bcond
, t0
);
6525 TCGv_i32 t1
= tcg_temp_new_i32();
6526 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6527 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6528 tcg_gen_and_i32(t0
, t0
, t1
);
6529 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
6530 tcg_gen_and_i32(t0
, t0
, t1
);
6531 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
6532 tcg_gen_nand_i32(t0
, t0
, t1
);
6533 tcg_temp_free_i32(t1
);
6534 tcg_gen_andi_i32(t0
, t0
, 1);
6535 tcg_gen_extu_i32_tl(bcond
, t0
);
6541 TCGv_i32 t1
= tcg_temp_new_i32();
6542 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6543 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6544 tcg_gen_or_i32(t0
, t0
, t1
);
6545 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
6546 tcg_gen_or_i32(t0
, t0
, t1
);
6547 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
6548 tcg_gen_or_i32(t0
, t0
, t1
);
6549 tcg_temp_free_i32(t1
);
6550 tcg_gen_andi_i32(t0
, t0
, 1);
6551 tcg_gen_extu_i32_tl(bcond
, t0
);
6555 ctx
->hflags
|= MIPS_HFLAG_BC
;
6559 generate_exception (ctx
, EXCP_RI
);
6562 (void)opn
; /* avoid a compiler warning */
6563 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx
, opn
,
6564 ctx
->hflags
, btarget
);
6565 ctx
->btarget
= btarget
;
6568 tcg_temp_free_i32(t0
);
6571 /* Coprocessor 1 (FPU) */
6573 #define FOP(func, fmt) (((fmt) << 21) | (func))
6576 OPC_ADD_S
= FOP(0, FMT_S
),
6577 OPC_SUB_S
= FOP(1, FMT_S
),
6578 OPC_MUL_S
= FOP(2, FMT_S
),
6579 OPC_DIV_S
= FOP(3, FMT_S
),
6580 OPC_SQRT_S
= FOP(4, FMT_S
),
6581 OPC_ABS_S
= FOP(5, FMT_S
),
6582 OPC_MOV_S
= FOP(6, FMT_S
),
6583 OPC_NEG_S
= FOP(7, FMT_S
),
6584 OPC_ROUND_L_S
= FOP(8, FMT_S
),
6585 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
6586 OPC_CEIL_L_S
= FOP(10, FMT_S
),
6587 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
6588 OPC_ROUND_W_S
= FOP(12, FMT_S
),
6589 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
6590 OPC_CEIL_W_S
= FOP(14, FMT_S
),
6591 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
6592 OPC_MOVCF_S
= FOP(17, FMT_S
),
6593 OPC_MOVZ_S
= FOP(18, FMT_S
),
6594 OPC_MOVN_S
= FOP(19, FMT_S
),
6595 OPC_RECIP_S
= FOP(21, FMT_S
),
6596 OPC_RSQRT_S
= FOP(22, FMT_S
),
6597 OPC_RECIP2_S
= FOP(28, FMT_S
),
6598 OPC_RECIP1_S
= FOP(29, FMT_S
),
6599 OPC_RSQRT1_S
= FOP(30, FMT_S
),
6600 OPC_RSQRT2_S
= FOP(31, FMT_S
),
6601 OPC_CVT_D_S
= FOP(33, FMT_S
),
6602 OPC_CVT_W_S
= FOP(36, FMT_S
),
6603 OPC_CVT_L_S
= FOP(37, FMT_S
),
6604 OPC_CVT_PS_S
= FOP(38, FMT_S
),
6605 OPC_CMP_F_S
= FOP (48, FMT_S
),
6606 OPC_CMP_UN_S
= FOP (49, FMT_S
),
6607 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
6608 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
6609 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
6610 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
6611 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
6612 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
6613 OPC_CMP_SF_S
= FOP (56, FMT_S
),
6614 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
6615 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
6616 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
6617 OPC_CMP_LT_S
= FOP (60, FMT_S
),
6618 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
6619 OPC_CMP_LE_S
= FOP (62, FMT_S
),
6620 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
6622 OPC_ADD_D
= FOP(0, FMT_D
),
6623 OPC_SUB_D
= FOP(1, FMT_D
),
6624 OPC_MUL_D
= FOP(2, FMT_D
),
6625 OPC_DIV_D
= FOP(3, FMT_D
),
6626 OPC_SQRT_D
= FOP(4, FMT_D
),
6627 OPC_ABS_D
= FOP(5, FMT_D
),
6628 OPC_MOV_D
= FOP(6, FMT_D
),
6629 OPC_NEG_D
= FOP(7, FMT_D
),
6630 OPC_ROUND_L_D
= FOP(8, FMT_D
),
6631 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
6632 OPC_CEIL_L_D
= FOP(10, FMT_D
),
6633 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
6634 OPC_ROUND_W_D
= FOP(12, FMT_D
),
6635 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
6636 OPC_CEIL_W_D
= FOP(14, FMT_D
),
6637 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
6638 OPC_MOVCF_D
= FOP(17, FMT_D
),
6639 OPC_MOVZ_D
= FOP(18, FMT_D
),
6640 OPC_MOVN_D
= FOP(19, FMT_D
),
6641 OPC_RECIP_D
= FOP(21, FMT_D
),
6642 OPC_RSQRT_D
= FOP(22, FMT_D
),
6643 OPC_RECIP2_D
= FOP(28, FMT_D
),
6644 OPC_RECIP1_D
= FOP(29, FMT_D
),
6645 OPC_RSQRT1_D
= FOP(30, FMT_D
),
6646 OPC_RSQRT2_D
= FOP(31, FMT_D
),
6647 OPC_CVT_S_D
= FOP(32, FMT_D
),
6648 OPC_CVT_W_D
= FOP(36, FMT_D
),
6649 OPC_CVT_L_D
= FOP(37, FMT_D
),
6650 OPC_CMP_F_D
= FOP (48, FMT_D
),
6651 OPC_CMP_UN_D
= FOP (49, FMT_D
),
6652 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
6653 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
6654 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
6655 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
6656 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
6657 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
6658 OPC_CMP_SF_D
= FOP (56, FMT_D
),
6659 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
6660 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
6661 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
6662 OPC_CMP_LT_D
= FOP (60, FMT_D
),
6663 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
6664 OPC_CMP_LE_D
= FOP (62, FMT_D
),
6665 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
6667 OPC_CVT_S_W
= FOP(32, FMT_W
),
6668 OPC_CVT_D_W
= FOP(33, FMT_W
),
6669 OPC_CVT_S_L
= FOP(32, FMT_L
),
6670 OPC_CVT_D_L
= FOP(33, FMT_L
),
6671 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
6673 OPC_ADD_PS
= FOP(0, FMT_PS
),
6674 OPC_SUB_PS
= FOP(1, FMT_PS
),
6675 OPC_MUL_PS
= FOP(2, FMT_PS
),
6676 OPC_DIV_PS
= FOP(3, FMT_PS
),
6677 OPC_ABS_PS
= FOP(5, FMT_PS
),
6678 OPC_MOV_PS
= FOP(6, FMT_PS
),
6679 OPC_NEG_PS
= FOP(7, FMT_PS
),
6680 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
6681 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
6682 OPC_MOVN_PS
= FOP(19, FMT_PS
),
6683 OPC_ADDR_PS
= FOP(24, FMT_PS
),
6684 OPC_MULR_PS
= FOP(26, FMT_PS
),
6685 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
6686 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
6687 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
6688 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
6690 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
6691 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
6692 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
6693 OPC_PLL_PS
= FOP(44, FMT_PS
),
6694 OPC_PLU_PS
= FOP(45, FMT_PS
),
6695 OPC_PUL_PS
= FOP(46, FMT_PS
),
6696 OPC_PUU_PS
= FOP(47, FMT_PS
),
6697 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
6698 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
6699 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
6700 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
6701 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
6702 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
6703 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
6704 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
6705 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
6706 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
6707 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
6708 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
6709 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
6710 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
6711 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
6712 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
6715 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
6717 const char *opn
= "cp1 move";
6718 TCGv t0
= tcg_temp_new();
6723 TCGv_i32 fp0
= tcg_temp_new_i32();
6725 gen_load_fpr32(fp0
, fs
);
6726 tcg_gen_ext_i32_tl(t0
, fp0
);
6727 tcg_temp_free_i32(fp0
);
6729 gen_store_gpr(t0
, rt
);
6733 gen_load_gpr(t0
, rt
);
6735 TCGv_i32 fp0
= tcg_temp_new_i32();
6737 tcg_gen_trunc_tl_i32(fp0
, t0
);
6738 gen_store_fpr32(fp0
, fs
);
6739 tcg_temp_free_i32(fp0
);
6744 gen_helper_1e0i(cfc1
, t0
, fs
);
6745 gen_store_gpr(t0
, rt
);
6749 gen_load_gpr(t0
, rt
);
6750 gen_helper_0e1i(ctc1
, t0
, fs
);
6753 #if defined(TARGET_MIPS64)
6755 gen_load_fpr64(ctx
, t0
, fs
);
6756 gen_store_gpr(t0
, rt
);
6760 gen_load_gpr(t0
, rt
);
6761 gen_store_fpr64(ctx
, t0
, fs
);
6767 TCGv_i32 fp0
= tcg_temp_new_i32();
6769 gen_load_fpr32h(fp0
, fs
);
6770 tcg_gen_ext_i32_tl(t0
, fp0
);
6771 tcg_temp_free_i32(fp0
);
6773 gen_store_gpr(t0
, rt
);
6777 gen_load_gpr(t0
, rt
);
6779 TCGv_i32 fp0
= tcg_temp_new_i32();
6781 tcg_gen_trunc_tl_i32(fp0
, t0
);
6782 gen_store_fpr32h(fp0
, fs
);
6783 tcg_temp_free_i32(fp0
);
6789 generate_exception (ctx
, EXCP_RI
);
6792 (void)opn
; /* avoid a compiler warning */
6793 MIPS_DEBUG("%s %s %s", opn
, regnames
[rt
], fregnames
[fs
]);
6799 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
6815 l1
= gen_new_label();
6816 t0
= tcg_temp_new_i32();
6817 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
6818 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
6819 tcg_temp_free_i32(t0
);
6821 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
6823 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
6828 static inline void gen_movcf_s (int fs
, int fd
, int cc
, int tf
)
6831 TCGv_i32 t0
= tcg_temp_new_i32();
6832 int l1
= gen_new_label();
6839 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
6840 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
6841 gen_load_fpr32(t0
, fs
);
6842 gen_store_fpr32(t0
, fd
);
6844 tcg_temp_free_i32(t0
);
6847 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
6850 TCGv_i32 t0
= tcg_temp_new_i32();
6852 int l1
= gen_new_label();
6859 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
6860 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
6861 tcg_temp_free_i32(t0
);
6862 fp0
= tcg_temp_new_i64();
6863 gen_load_fpr64(ctx
, fp0
, fs
);
6864 gen_store_fpr64(ctx
, fp0
, fd
);
6865 tcg_temp_free_i64(fp0
);
6869 static inline void gen_movcf_ps (int fs
, int fd
, int cc
, int tf
)
6872 TCGv_i32 t0
= tcg_temp_new_i32();
6873 int l1
= gen_new_label();
6874 int l2
= gen_new_label();
6881 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
6882 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
6883 gen_load_fpr32(t0
, fs
);
6884 gen_store_fpr32(t0
, fd
);
6887 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
6888 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
6889 gen_load_fpr32h(t0
, fs
);
6890 gen_store_fpr32h(t0
, fd
);
6891 tcg_temp_free_i32(t0
);
6896 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
6897 int ft
, int fs
, int fd
, int cc
)
6899 const char *opn
= "farith";
6900 const char *condnames
[] = {
6918 const char *condnames_abs
[] = {
6936 enum { BINOP
, CMPOP
, OTHEROP
} optype
= OTHEROP
;
6937 uint32_t func
= ctx
->opcode
& 0x3f;
6942 TCGv_i32 fp0
= tcg_temp_new_i32();
6943 TCGv_i32 fp1
= tcg_temp_new_i32();
6945 gen_load_fpr32(fp0
, fs
);
6946 gen_load_fpr32(fp1
, ft
);
6947 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
6948 tcg_temp_free_i32(fp1
);
6949 gen_store_fpr32(fp0
, fd
);
6950 tcg_temp_free_i32(fp0
);
6957 TCGv_i32 fp0
= tcg_temp_new_i32();
6958 TCGv_i32 fp1
= tcg_temp_new_i32();
6960 gen_load_fpr32(fp0
, fs
);
6961 gen_load_fpr32(fp1
, ft
);
6962 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
6963 tcg_temp_free_i32(fp1
);
6964 gen_store_fpr32(fp0
, fd
);
6965 tcg_temp_free_i32(fp0
);
6972 TCGv_i32 fp0
= tcg_temp_new_i32();
6973 TCGv_i32 fp1
= tcg_temp_new_i32();
6975 gen_load_fpr32(fp0
, fs
);
6976 gen_load_fpr32(fp1
, ft
);
6977 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
6978 tcg_temp_free_i32(fp1
);
6979 gen_store_fpr32(fp0
, fd
);
6980 tcg_temp_free_i32(fp0
);
6987 TCGv_i32 fp0
= tcg_temp_new_i32();
6988 TCGv_i32 fp1
= tcg_temp_new_i32();
6990 gen_load_fpr32(fp0
, fs
);
6991 gen_load_fpr32(fp1
, ft
);
6992 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
6993 tcg_temp_free_i32(fp1
);
6994 gen_store_fpr32(fp0
, fd
);
6995 tcg_temp_free_i32(fp0
);
7002 TCGv_i32 fp0
= tcg_temp_new_i32();
7004 gen_load_fpr32(fp0
, fs
);
7005 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
7006 gen_store_fpr32(fp0
, fd
);
7007 tcg_temp_free_i32(fp0
);
7013 TCGv_i32 fp0
= tcg_temp_new_i32();
7015 gen_load_fpr32(fp0
, fs
);
7016 gen_helper_float_abs_s(fp0
, fp0
);
7017 gen_store_fpr32(fp0
, fd
);
7018 tcg_temp_free_i32(fp0
);
7024 TCGv_i32 fp0
= tcg_temp_new_i32();
7026 gen_load_fpr32(fp0
, fs
);
7027 gen_store_fpr32(fp0
, fd
);
7028 tcg_temp_free_i32(fp0
);
7034 TCGv_i32 fp0
= tcg_temp_new_i32();
7036 gen_load_fpr32(fp0
, fs
);
7037 gen_helper_float_chs_s(fp0
, fp0
);
7038 gen_store_fpr32(fp0
, fd
);
7039 tcg_temp_free_i32(fp0
);
7044 check_cp1_64bitmode(ctx
);
7046 TCGv_i32 fp32
= tcg_temp_new_i32();
7047 TCGv_i64 fp64
= tcg_temp_new_i64();
7049 gen_load_fpr32(fp32
, fs
);
7050 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
7051 tcg_temp_free_i32(fp32
);
7052 gen_store_fpr64(ctx
, fp64
, fd
);
7053 tcg_temp_free_i64(fp64
);
7058 check_cp1_64bitmode(ctx
);
7060 TCGv_i32 fp32
= tcg_temp_new_i32();
7061 TCGv_i64 fp64
= tcg_temp_new_i64();
7063 gen_load_fpr32(fp32
, fs
);
7064 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
7065 tcg_temp_free_i32(fp32
);
7066 gen_store_fpr64(ctx
, fp64
, fd
);
7067 tcg_temp_free_i64(fp64
);
7072 check_cp1_64bitmode(ctx
);
7074 TCGv_i32 fp32
= tcg_temp_new_i32();
7075 TCGv_i64 fp64
= tcg_temp_new_i64();
7077 gen_load_fpr32(fp32
, fs
);
7078 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
7079 tcg_temp_free_i32(fp32
);
7080 gen_store_fpr64(ctx
, fp64
, fd
);
7081 tcg_temp_free_i64(fp64
);
7086 check_cp1_64bitmode(ctx
);
7088 TCGv_i32 fp32
= tcg_temp_new_i32();
7089 TCGv_i64 fp64
= tcg_temp_new_i64();
7091 gen_load_fpr32(fp32
, fs
);
7092 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
7093 tcg_temp_free_i32(fp32
);
7094 gen_store_fpr64(ctx
, fp64
, fd
);
7095 tcg_temp_free_i64(fp64
);
7101 TCGv_i32 fp0
= tcg_temp_new_i32();
7103 gen_load_fpr32(fp0
, fs
);
7104 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
7105 gen_store_fpr32(fp0
, fd
);
7106 tcg_temp_free_i32(fp0
);
7112 TCGv_i32 fp0
= tcg_temp_new_i32();
7114 gen_load_fpr32(fp0
, fs
);
7115 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
7116 gen_store_fpr32(fp0
, fd
);
7117 tcg_temp_free_i32(fp0
);
7123 TCGv_i32 fp0
= tcg_temp_new_i32();
7125 gen_load_fpr32(fp0
, fs
);
7126 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
7127 gen_store_fpr32(fp0
, fd
);
7128 tcg_temp_free_i32(fp0
);
7134 TCGv_i32 fp0
= tcg_temp_new_i32();
7136 gen_load_fpr32(fp0
, fs
);
7137 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
7138 gen_store_fpr32(fp0
, fd
);
7139 tcg_temp_free_i32(fp0
);
7144 gen_movcf_s(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7149 int l1
= gen_new_label();
7153 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7155 fp0
= tcg_temp_new_i32();
7156 gen_load_fpr32(fp0
, fs
);
7157 gen_store_fpr32(fp0
, fd
);
7158 tcg_temp_free_i32(fp0
);
7165 int l1
= gen_new_label();
7169 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7170 fp0
= tcg_temp_new_i32();
7171 gen_load_fpr32(fp0
, fs
);
7172 gen_store_fpr32(fp0
, fd
);
7173 tcg_temp_free_i32(fp0
);
7182 TCGv_i32 fp0
= tcg_temp_new_i32();
7184 gen_load_fpr32(fp0
, fs
);
7185 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
7186 gen_store_fpr32(fp0
, fd
);
7187 tcg_temp_free_i32(fp0
);
7194 TCGv_i32 fp0
= tcg_temp_new_i32();
7196 gen_load_fpr32(fp0
, fs
);
7197 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
7198 gen_store_fpr32(fp0
, fd
);
7199 tcg_temp_free_i32(fp0
);
7204 check_cp1_64bitmode(ctx
);
7206 TCGv_i32 fp0
= tcg_temp_new_i32();
7207 TCGv_i32 fp1
= tcg_temp_new_i32();
7209 gen_load_fpr32(fp0
, fs
);
7210 gen_load_fpr32(fp1
, ft
);
7211 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
7212 tcg_temp_free_i32(fp1
);
7213 gen_store_fpr32(fp0
, fd
);
7214 tcg_temp_free_i32(fp0
);
7219 check_cp1_64bitmode(ctx
);
7221 TCGv_i32 fp0
= tcg_temp_new_i32();
7223 gen_load_fpr32(fp0
, fs
);
7224 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
7225 gen_store_fpr32(fp0
, fd
);
7226 tcg_temp_free_i32(fp0
);
7231 check_cp1_64bitmode(ctx
);
7233 TCGv_i32 fp0
= tcg_temp_new_i32();
7235 gen_load_fpr32(fp0
, fs
);
7236 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
7237 gen_store_fpr32(fp0
, fd
);
7238 tcg_temp_free_i32(fp0
);
7243 check_cp1_64bitmode(ctx
);
7245 TCGv_i32 fp0
= tcg_temp_new_i32();
7246 TCGv_i32 fp1
= tcg_temp_new_i32();
7248 gen_load_fpr32(fp0
, fs
);
7249 gen_load_fpr32(fp1
, ft
);
7250 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
7251 tcg_temp_free_i32(fp1
);
7252 gen_store_fpr32(fp0
, fd
);
7253 tcg_temp_free_i32(fp0
);
7258 check_cp1_registers(ctx
, fd
);
7260 TCGv_i32 fp32
= tcg_temp_new_i32();
7261 TCGv_i64 fp64
= tcg_temp_new_i64();
7263 gen_load_fpr32(fp32
, fs
);
7264 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
7265 tcg_temp_free_i32(fp32
);
7266 gen_store_fpr64(ctx
, fp64
, fd
);
7267 tcg_temp_free_i64(fp64
);
7273 TCGv_i32 fp0
= tcg_temp_new_i32();
7275 gen_load_fpr32(fp0
, fs
);
7276 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
7277 gen_store_fpr32(fp0
, fd
);
7278 tcg_temp_free_i32(fp0
);
7283 check_cp1_64bitmode(ctx
);
7285 TCGv_i32 fp32
= tcg_temp_new_i32();
7286 TCGv_i64 fp64
= tcg_temp_new_i64();
7288 gen_load_fpr32(fp32
, fs
);
7289 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
7290 tcg_temp_free_i32(fp32
);
7291 gen_store_fpr64(ctx
, fp64
, fd
);
7292 tcg_temp_free_i64(fp64
);
7297 check_cp1_64bitmode(ctx
);
7299 TCGv_i64 fp64
= tcg_temp_new_i64();
7300 TCGv_i32 fp32_0
= tcg_temp_new_i32();
7301 TCGv_i32 fp32_1
= tcg_temp_new_i32();
7303 gen_load_fpr32(fp32_0
, fs
);
7304 gen_load_fpr32(fp32_1
, ft
);
7305 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
7306 tcg_temp_free_i32(fp32_1
);
7307 tcg_temp_free_i32(fp32_0
);
7308 gen_store_fpr64(ctx
, fp64
, fd
);
7309 tcg_temp_free_i64(fp64
);
7322 case OPC_CMP_NGLE_S
:
7329 if (ctx
->opcode
& (1 << 6)) {
7330 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
7331 opn
= condnames_abs
[func
-48];
7333 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
7334 opn
= condnames
[func
-48];
7338 check_cp1_registers(ctx
, fs
| ft
| fd
);
7340 TCGv_i64 fp0
= tcg_temp_new_i64();
7341 TCGv_i64 fp1
= tcg_temp_new_i64();
7343 gen_load_fpr64(ctx
, fp0
, fs
);
7344 gen_load_fpr64(ctx
, fp1
, ft
);
7345 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
7346 tcg_temp_free_i64(fp1
);
7347 gen_store_fpr64(ctx
, fp0
, fd
);
7348 tcg_temp_free_i64(fp0
);
7354 check_cp1_registers(ctx
, fs
| ft
| fd
);
7356 TCGv_i64 fp0
= tcg_temp_new_i64();
7357 TCGv_i64 fp1
= tcg_temp_new_i64();
7359 gen_load_fpr64(ctx
, fp0
, fs
);
7360 gen_load_fpr64(ctx
, fp1
, ft
);
7361 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
7362 tcg_temp_free_i64(fp1
);
7363 gen_store_fpr64(ctx
, fp0
, fd
);
7364 tcg_temp_free_i64(fp0
);
7370 check_cp1_registers(ctx
, fs
| ft
| fd
);
7372 TCGv_i64 fp0
= tcg_temp_new_i64();
7373 TCGv_i64 fp1
= tcg_temp_new_i64();
7375 gen_load_fpr64(ctx
, fp0
, fs
);
7376 gen_load_fpr64(ctx
, fp1
, ft
);
7377 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
7378 tcg_temp_free_i64(fp1
);
7379 gen_store_fpr64(ctx
, fp0
, fd
);
7380 tcg_temp_free_i64(fp0
);
7386 check_cp1_registers(ctx
, fs
| ft
| fd
);
7388 TCGv_i64 fp0
= tcg_temp_new_i64();
7389 TCGv_i64 fp1
= tcg_temp_new_i64();
7391 gen_load_fpr64(ctx
, fp0
, fs
);
7392 gen_load_fpr64(ctx
, fp1
, ft
);
7393 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
7394 tcg_temp_free_i64(fp1
);
7395 gen_store_fpr64(ctx
, fp0
, fd
);
7396 tcg_temp_free_i64(fp0
);
7402 check_cp1_registers(ctx
, fs
| fd
);
7404 TCGv_i64 fp0
= tcg_temp_new_i64();
7406 gen_load_fpr64(ctx
, fp0
, fs
);
7407 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
7408 gen_store_fpr64(ctx
, fp0
, fd
);
7409 tcg_temp_free_i64(fp0
);
7414 check_cp1_registers(ctx
, fs
| fd
);
7416 TCGv_i64 fp0
= tcg_temp_new_i64();
7418 gen_load_fpr64(ctx
, fp0
, fs
);
7419 gen_helper_float_abs_d(fp0
, fp0
);
7420 gen_store_fpr64(ctx
, fp0
, fd
);
7421 tcg_temp_free_i64(fp0
);
7426 check_cp1_registers(ctx
, fs
| fd
);
7428 TCGv_i64 fp0
= tcg_temp_new_i64();
7430 gen_load_fpr64(ctx
, fp0
, fs
);
7431 gen_store_fpr64(ctx
, fp0
, fd
);
7432 tcg_temp_free_i64(fp0
);
7437 check_cp1_registers(ctx
, fs
| fd
);
7439 TCGv_i64 fp0
= tcg_temp_new_i64();
7441 gen_load_fpr64(ctx
, fp0
, fs
);
7442 gen_helper_float_chs_d(fp0
, fp0
);
7443 gen_store_fpr64(ctx
, fp0
, fd
);
7444 tcg_temp_free_i64(fp0
);
7449 check_cp1_64bitmode(ctx
);
7451 TCGv_i64 fp0
= tcg_temp_new_i64();
7453 gen_load_fpr64(ctx
, fp0
, fs
);
7454 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
7455 gen_store_fpr64(ctx
, fp0
, fd
);
7456 tcg_temp_free_i64(fp0
);
7461 check_cp1_64bitmode(ctx
);
7463 TCGv_i64 fp0
= tcg_temp_new_i64();
7465 gen_load_fpr64(ctx
, fp0
, fs
);
7466 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
7467 gen_store_fpr64(ctx
, fp0
, fd
);
7468 tcg_temp_free_i64(fp0
);
7473 check_cp1_64bitmode(ctx
);
7475 TCGv_i64 fp0
= tcg_temp_new_i64();
7477 gen_load_fpr64(ctx
, fp0
, fs
);
7478 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
7479 gen_store_fpr64(ctx
, fp0
, fd
);
7480 tcg_temp_free_i64(fp0
);
7485 check_cp1_64bitmode(ctx
);
7487 TCGv_i64 fp0
= tcg_temp_new_i64();
7489 gen_load_fpr64(ctx
, fp0
, fs
);
7490 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
7491 gen_store_fpr64(ctx
, fp0
, fd
);
7492 tcg_temp_free_i64(fp0
);
7497 check_cp1_registers(ctx
, fs
);
7499 TCGv_i32 fp32
= tcg_temp_new_i32();
7500 TCGv_i64 fp64
= tcg_temp_new_i64();
7502 gen_load_fpr64(ctx
, fp64
, fs
);
7503 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
7504 tcg_temp_free_i64(fp64
);
7505 gen_store_fpr32(fp32
, fd
);
7506 tcg_temp_free_i32(fp32
);
7511 check_cp1_registers(ctx
, fs
);
7513 TCGv_i32 fp32
= tcg_temp_new_i32();
7514 TCGv_i64 fp64
= tcg_temp_new_i64();
7516 gen_load_fpr64(ctx
, fp64
, fs
);
7517 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
7518 tcg_temp_free_i64(fp64
);
7519 gen_store_fpr32(fp32
, fd
);
7520 tcg_temp_free_i32(fp32
);
7525 check_cp1_registers(ctx
, fs
);
7527 TCGv_i32 fp32
= tcg_temp_new_i32();
7528 TCGv_i64 fp64
= tcg_temp_new_i64();
7530 gen_load_fpr64(ctx
, fp64
, fs
);
7531 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
7532 tcg_temp_free_i64(fp64
);
7533 gen_store_fpr32(fp32
, fd
);
7534 tcg_temp_free_i32(fp32
);
7539 check_cp1_registers(ctx
, fs
);
7541 TCGv_i32 fp32
= tcg_temp_new_i32();
7542 TCGv_i64 fp64
= tcg_temp_new_i64();
7544 gen_load_fpr64(ctx
, fp64
, fs
);
7545 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
7546 tcg_temp_free_i64(fp64
);
7547 gen_store_fpr32(fp32
, fd
);
7548 tcg_temp_free_i32(fp32
);
7553 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7558 int l1
= gen_new_label();
7562 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7564 fp0
= tcg_temp_new_i64();
7565 gen_load_fpr64(ctx
, fp0
, fs
);
7566 gen_store_fpr64(ctx
, fp0
, fd
);
7567 tcg_temp_free_i64(fp0
);
7574 int l1
= gen_new_label();
7578 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7579 fp0
= tcg_temp_new_i64();
7580 gen_load_fpr64(ctx
, fp0
, fs
);
7581 gen_store_fpr64(ctx
, fp0
, fd
);
7582 tcg_temp_free_i64(fp0
);
7589 check_cp1_64bitmode(ctx
);
7591 TCGv_i64 fp0
= tcg_temp_new_i64();
7593 gen_load_fpr64(ctx
, fp0
, fs
);
7594 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
7595 gen_store_fpr64(ctx
, fp0
, fd
);
7596 tcg_temp_free_i64(fp0
);
7601 check_cp1_64bitmode(ctx
);
7603 TCGv_i64 fp0
= tcg_temp_new_i64();
7605 gen_load_fpr64(ctx
, fp0
, fs
);
7606 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
7607 gen_store_fpr64(ctx
, fp0
, fd
);
7608 tcg_temp_free_i64(fp0
);
7613 check_cp1_64bitmode(ctx
);
7615 TCGv_i64 fp0
= tcg_temp_new_i64();
7616 TCGv_i64 fp1
= tcg_temp_new_i64();
7618 gen_load_fpr64(ctx
, fp0
, fs
);
7619 gen_load_fpr64(ctx
, fp1
, ft
);
7620 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
7621 tcg_temp_free_i64(fp1
);
7622 gen_store_fpr64(ctx
, fp0
, fd
);
7623 tcg_temp_free_i64(fp0
);
7628 check_cp1_64bitmode(ctx
);
7630 TCGv_i64 fp0
= tcg_temp_new_i64();
7632 gen_load_fpr64(ctx
, fp0
, fs
);
7633 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
7634 gen_store_fpr64(ctx
, fp0
, fd
);
7635 tcg_temp_free_i64(fp0
);
7640 check_cp1_64bitmode(ctx
);
7642 TCGv_i64 fp0
= tcg_temp_new_i64();
7644 gen_load_fpr64(ctx
, fp0
, fs
);
7645 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
7646 gen_store_fpr64(ctx
, fp0
, fd
);
7647 tcg_temp_free_i64(fp0
);
7652 check_cp1_64bitmode(ctx
);
7654 TCGv_i64 fp0
= tcg_temp_new_i64();
7655 TCGv_i64 fp1
= tcg_temp_new_i64();
7657 gen_load_fpr64(ctx
, fp0
, fs
);
7658 gen_load_fpr64(ctx
, fp1
, ft
);
7659 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
7660 tcg_temp_free_i64(fp1
);
7661 gen_store_fpr64(ctx
, fp0
, fd
);
7662 tcg_temp_free_i64(fp0
);
7675 case OPC_CMP_NGLE_D
:
7682 if (ctx
->opcode
& (1 << 6)) {
7683 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
7684 opn
= condnames_abs
[func
-48];
7686 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
7687 opn
= condnames
[func
-48];
7691 check_cp1_registers(ctx
, fs
);
7693 TCGv_i32 fp32
= tcg_temp_new_i32();
7694 TCGv_i64 fp64
= tcg_temp_new_i64();
7696 gen_load_fpr64(ctx
, fp64
, fs
);
7697 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
7698 tcg_temp_free_i64(fp64
);
7699 gen_store_fpr32(fp32
, fd
);
7700 tcg_temp_free_i32(fp32
);
7705 check_cp1_registers(ctx
, fs
);
7707 TCGv_i32 fp32
= tcg_temp_new_i32();
7708 TCGv_i64 fp64
= tcg_temp_new_i64();
7710 gen_load_fpr64(ctx
, fp64
, fs
);
7711 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
7712 tcg_temp_free_i64(fp64
);
7713 gen_store_fpr32(fp32
, fd
);
7714 tcg_temp_free_i32(fp32
);
7719 check_cp1_64bitmode(ctx
);
7721 TCGv_i64 fp0
= tcg_temp_new_i64();
7723 gen_load_fpr64(ctx
, fp0
, fs
);
7724 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
7725 gen_store_fpr64(ctx
, fp0
, fd
);
7726 tcg_temp_free_i64(fp0
);
7732 TCGv_i32 fp0
= tcg_temp_new_i32();
7734 gen_load_fpr32(fp0
, fs
);
7735 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
7736 gen_store_fpr32(fp0
, fd
);
7737 tcg_temp_free_i32(fp0
);
7742 check_cp1_registers(ctx
, fd
);
7744 TCGv_i32 fp32
= tcg_temp_new_i32();
7745 TCGv_i64 fp64
= tcg_temp_new_i64();
7747 gen_load_fpr32(fp32
, fs
);
7748 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
7749 tcg_temp_free_i32(fp32
);
7750 gen_store_fpr64(ctx
, fp64
, fd
);
7751 tcg_temp_free_i64(fp64
);
7756 check_cp1_64bitmode(ctx
);
7758 TCGv_i32 fp32
= tcg_temp_new_i32();
7759 TCGv_i64 fp64
= tcg_temp_new_i64();
7761 gen_load_fpr64(ctx
, fp64
, fs
);
7762 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
7763 tcg_temp_free_i64(fp64
);
7764 gen_store_fpr32(fp32
, fd
);
7765 tcg_temp_free_i32(fp32
);
7770 check_cp1_64bitmode(ctx
);
7772 TCGv_i64 fp0
= tcg_temp_new_i64();
7774 gen_load_fpr64(ctx
, fp0
, fs
);
7775 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
7776 gen_store_fpr64(ctx
, fp0
, fd
);
7777 tcg_temp_free_i64(fp0
);
7782 check_cp1_64bitmode(ctx
);
7784 TCGv_i64 fp0
= tcg_temp_new_i64();
7786 gen_load_fpr64(ctx
, fp0
, fs
);
7787 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
7788 gen_store_fpr64(ctx
, fp0
, fd
);
7789 tcg_temp_free_i64(fp0
);
7794 check_cp1_64bitmode(ctx
);
7796 TCGv_i64 fp0
= tcg_temp_new_i64();
7797 TCGv_i64 fp1
= tcg_temp_new_i64();
7799 gen_load_fpr64(ctx
, fp0
, fs
);
7800 gen_load_fpr64(ctx
, fp1
, ft
);
7801 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
7802 tcg_temp_free_i64(fp1
);
7803 gen_store_fpr64(ctx
, fp0
, fd
);
7804 tcg_temp_free_i64(fp0
);
7809 check_cp1_64bitmode(ctx
);
7811 TCGv_i64 fp0
= tcg_temp_new_i64();
7812 TCGv_i64 fp1
= tcg_temp_new_i64();
7814 gen_load_fpr64(ctx
, fp0
, fs
);
7815 gen_load_fpr64(ctx
, fp1
, ft
);
7816 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
7817 tcg_temp_free_i64(fp1
);
7818 gen_store_fpr64(ctx
, fp0
, fd
);
7819 tcg_temp_free_i64(fp0
);
7824 check_cp1_64bitmode(ctx
);
7826 TCGv_i64 fp0
= tcg_temp_new_i64();
7827 TCGv_i64 fp1
= tcg_temp_new_i64();
7829 gen_load_fpr64(ctx
, fp0
, fs
);
7830 gen_load_fpr64(ctx
, fp1
, ft
);
7831 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
7832 tcg_temp_free_i64(fp1
);
7833 gen_store_fpr64(ctx
, fp0
, fd
);
7834 tcg_temp_free_i64(fp0
);
7839 check_cp1_64bitmode(ctx
);
7841 TCGv_i64 fp0
= tcg_temp_new_i64();
7843 gen_load_fpr64(ctx
, fp0
, fs
);
7844 gen_helper_float_abs_ps(fp0
, fp0
);
7845 gen_store_fpr64(ctx
, fp0
, fd
);
7846 tcg_temp_free_i64(fp0
);
7851 check_cp1_64bitmode(ctx
);
7853 TCGv_i64 fp0
= tcg_temp_new_i64();
7855 gen_load_fpr64(ctx
, fp0
, fs
);
7856 gen_store_fpr64(ctx
, fp0
, fd
);
7857 tcg_temp_free_i64(fp0
);
7862 check_cp1_64bitmode(ctx
);
7864 TCGv_i64 fp0
= tcg_temp_new_i64();
7866 gen_load_fpr64(ctx
, fp0
, fs
);
7867 gen_helper_float_chs_ps(fp0
, fp0
);
7868 gen_store_fpr64(ctx
, fp0
, fd
);
7869 tcg_temp_free_i64(fp0
);
7874 check_cp1_64bitmode(ctx
);
7875 gen_movcf_ps(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7879 check_cp1_64bitmode(ctx
);
7881 int l1
= gen_new_label();
7885 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7886 fp0
= tcg_temp_new_i64();
7887 gen_load_fpr64(ctx
, fp0
, fs
);
7888 gen_store_fpr64(ctx
, fp0
, fd
);
7889 tcg_temp_free_i64(fp0
);
7895 check_cp1_64bitmode(ctx
);
7897 int l1
= gen_new_label();
7901 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7902 fp0
= tcg_temp_new_i64();
7903 gen_load_fpr64(ctx
, fp0
, fs
);
7904 gen_store_fpr64(ctx
, fp0
, fd
);
7905 tcg_temp_free_i64(fp0
);
7912 check_cp1_64bitmode(ctx
);
7914 TCGv_i64 fp0
= tcg_temp_new_i64();
7915 TCGv_i64 fp1
= tcg_temp_new_i64();
7917 gen_load_fpr64(ctx
, fp0
, ft
);
7918 gen_load_fpr64(ctx
, fp1
, fs
);
7919 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
7920 tcg_temp_free_i64(fp1
);
7921 gen_store_fpr64(ctx
, fp0
, fd
);
7922 tcg_temp_free_i64(fp0
);
7927 check_cp1_64bitmode(ctx
);
7929 TCGv_i64 fp0
= tcg_temp_new_i64();
7930 TCGv_i64 fp1
= tcg_temp_new_i64();
7932 gen_load_fpr64(ctx
, fp0
, ft
);
7933 gen_load_fpr64(ctx
, fp1
, fs
);
7934 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
7935 tcg_temp_free_i64(fp1
);
7936 gen_store_fpr64(ctx
, fp0
, fd
);
7937 tcg_temp_free_i64(fp0
);
7942 check_cp1_64bitmode(ctx
);
7944 TCGv_i64 fp0
= tcg_temp_new_i64();
7945 TCGv_i64 fp1
= tcg_temp_new_i64();
7947 gen_load_fpr64(ctx
, fp0
, fs
);
7948 gen_load_fpr64(ctx
, fp1
, ft
);
7949 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
7950 tcg_temp_free_i64(fp1
);
7951 gen_store_fpr64(ctx
, fp0
, fd
);
7952 tcg_temp_free_i64(fp0
);
7957 check_cp1_64bitmode(ctx
);
7959 TCGv_i64 fp0
= tcg_temp_new_i64();
7961 gen_load_fpr64(ctx
, fp0
, fs
);
7962 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
7963 gen_store_fpr64(ctx
, fp0
, fd
);
7964 tcg_temp_free_i64(fp0
);
7969 check_cp1_64bitmode(ctx
);
7971 TCGv_i64 fp0
= tcg_temp_new_i64();
7973 gen_load_fpr64(ctx
, fp0
, fs
);
7974 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
7975 gen_store_fpr64(ctx
, fp0
, fd
);
7976 tcg_temp_free_i64(fp0
);
7981 check_cp1_64bitmode(ctx
);
7983 TCGv_i64 fp0
= tcg_temp_new_i64();
7984 TCGv_i64 fp1
= tcg_temp_new_i64();
7986 gen_load_fpr64(ctx
, fp0
, fs
);
7987 gen_load_fpr64(ctx
, fp1
, ft
);
7988 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
7989 tcg_temp_free_i64(fp1
);
7990 gen_store_fpr64(ctx
, fp0
, fd
);
7991 tcg_temp_free_i64(fp0
);
7996 check_cp1_64bitmode(ctx
);
7998 TCGv_i32 fp0
= tcg_temp_new_i32();
8000 gen_load_fpr32h(fp0
, fs
);
8001 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
8002 gen_store_fpr32(fp0
, fd
);
8003 tcg_temp_free_i32(fp0
);
8008 check_cp1_64bitmode(ctx
);
8010 TCGv_i64 fp0
= tcg_temp_new_i64();
8012 gen_load_fpr64(ctx
, fp0
, fs
);
8013 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
8014 gen_store_fpr64(ctx
, fp0
, fd
);
8015 tcg_temp_free_i64(fp0
);
8020 check_cp1_64bitmode(ctx
);
8022 TCGv_i32 fp0
= tcg_temp_new_i32();
8024 gen_load_fpr32(fp0
, fs
);
8025 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
8026 gen_store_fpr32(fp0
, fd
);
8027 tcg_temp_free_i32(fp0
);
8032 check_cp1_64bitmode(ctx
);
8034 TCGv_i32 fp0
= tcg_temp_new_i32();
8035 TCGv_i32 fp1
= tcg_temp_new_i32();
8037 gen_load_fpr32(fp0
, fs
);
8038 gen_load_fpr32(fp1
, ft
);
8039 gen_store_fpr32h(fp0
, fd
);
8040 gen_store_fpr32(fp1
, fd
);
8041 tcg_temp_free_i32(fp0
);
8042 tcg_temp_free_i32(fp1
);
8047 check_cp1_64bitmode(ctx
);
8049 TCGv_i32 fp0
= tcg_temp_new_i32();
8050 TCGv_i32 fp1
= tcg_temp_new_i32();
8052 gen_load_fpr32(fp0
, fs
);
8053 gen_load_fpr32h(fp1
, ft
);
8054 gen_store_fpr32(fp1
, fd
);
8055 gen_store_fpr32h(fp0
, fd
);
8056 tcg_temp_free_i32(fp0
);
8057 tcg_temp_free_i32(fp1
);
8062 check_cp1_64bitmode(ctx
);
8064 TCGv_i32 fp0
= tcg_temp_new_i32();
8065 TCGv_i32 fp1
= tcg_temp_new_i32();
8067 gen_load_fpr32h(fp0
, fs
);
8068 gen_load_fpr32(fp1
, ft
);
8069 gen_store_fpr32(fp1
, fd
);
8070 gen_store_fpr32h(fp0
, fd
);
8071 tcg_temp_free_i32(fp0
);
8072 tcg_temp_free_i32(fp1
);
8077 check_cp1_64bitmode(ctx
);
8079 TCGv_i32 fp0
= tcg_temp_new_i32();
8080 TCGv_i32 fp1
= tcg_temp_new_i32();
8082 gen_load_fpr32h(fp0
, fs
);
8083 gen_load_fpr32h(fp1
, ft
);
8084 gen_store_fpr32(fp1
, fd
);
8085 gen_store_fpr32h(fp0
, fd
);
8086 tcg_temp_free_i32(fp0
);
8087 tcg_temp_free_i32(fp1
);
8094 case OPC_CMP_UEQ_PS
:
8095 case OPC_CMP_OLT_PS
:
8096 case OPC_CMP_ULT_PS
:
8097 case OPC_CMP_OLE_PS
:
8098 case OPC_CMP_ULE_PS
:
8100 case OPC_CMP_NGLE_PS
:
8101 case OPC_CMP_SEQ_PS
:
8102 case OPC_CMP_NGL_PS
:
8104 case OPC_CMP_NGE_PS
:
8106 case OPC_CMP_NGT_PS
:
8107 if (ctx
->opcode
& (1 << 6)) {
8108 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
8109 opn
= condnames_abs
[func
-48];
8111 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
8112 opn
= condnames
[func
-48];
8117 generate_exception (ctx
, EXCP_RI
);
8120 (void)opn
; /* avoid a compiler warning */
8123 MIPS_DEBUG("%s %s, %s, %s", opn
, fregnames
[fd
], fregnames
[fs
], fregnames
[ft
]);
8126 MIPS_DEBUG("%s %s,%s", opn
, fregnames
[fs
], fregnames
[ft
]);
8129 MIPS_DEBUG("%s %s,%s", opn
, fregnames
[fd
], fregnames
[fs
]);
8134 /* Coprocessor 3 (FPU) */
8135 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
8136 int fd
, int fs
, int base
, int index
)
8138 const char *opn
= "extended float load/store";
8140 TCGv t0
= tcg_temp_new();
8143 gen_load_gpr(t0
, index
);
8144 } else if (index
== 0) {
8145 gen_load_gpr(t0
, base
);
8147 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
8149 /* Don't do NOP if destination is zero: we must perform the actual
8151 save_cpu_state(ctx
, 0);
8156 TCGv_i32 fp0
= tcg_temp_new_i32();
8158 tcg_gen_qemu_ld32s(t0
, t0
, ctx
->mem_idx
);
8159 tcg_gen_trunc_tl_i32(fp0
, t0
);
8160 gen_store_fpr32(fp0
, fd
);
8161 tcg_temp_free_i32(fp0
);
8167 check_cp1_registers(ctx
, fd
);
8169 TCGv_i64 fp0
= tcg_temp_new_i64();
8171 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
8172 gen_store_fpr64(ctx
, fp0
, fd
);
8173 tcg_temp_free_i64(fp0
);
8178 check_cp1_64bitmode(ctx
);
8179 tcg_gen_andi_tl(t0
, t0
, ~0x7);
8181 TCGv_i64 fp0
= tcg_temp_new_i64();
8183 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
8184 gen_store_fpr64(ctx
, fp0
, fd
);
8185 tcg_temp_free_i64(fp0
);
8192 TCGv_i32 fp0
= tcg_temp_new_i32();
8193 TCGv t1
= tcg_temp_new();
8195 gen_load_fpr32(fp0
, fs
);
8196 tcg_gen_extu_i32_tl(t1
, fp0
);
8197 tcg_gen_qemu_st32(t1
, t0
, ctx
->mem_idx
);
8198 tcg_temp_free_i32(fp0
);
8206 check_cp1_registers(ctx
, fs
);
8208 TCGv_i64 fp0
= tcg_temp_new_i64();
8210 gen_load_fpr64(ctx
, fp0
, fs
);
8211 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
8212 tcg_temp_free_i64(fp0
);
8218 check_cp1_64bitmode(ctx
);
8219 tcg_gen_andi_tl(t0
, t0
, ~0x7);
8221 TCGv_i64 fp0
= tcg_temp_new_i64();
8223 gen_load_fpr64(ctx
, fp0
, fs
);
8224 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
8225 tcg_temp_free_i64(fp0
);
8232 (void)opn
; (void)store
; /* avoid compiler warnings */
8233 MIPS_DEBUG("%s %s, %s(%s)", opn
, fregnames
[store
? fs
: fd
],
8234 regnames
[index
], regnames
[base
]);
8237 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
8238 int fd
, int fr
, int fs
, int ft
)
8240 const char *opn
= "flt3_arith";
8244 check_cp1_64bitmode(ctx
);
8246 TCGv t0
= tcg_temp_local_new();
8247 TCGv_i32 fp
= tcg_temp_new_i32();
8248 TCGv_i32 fph
= tcg_temp_new_i32();
8249 int l1
= gen_new_label();
8250 int l2
= gen_new_label();
8252 gen_load_gpr(t0
, fr
);
8253 tcg_gen_andi_tl(t0
, t0
, 0x7);
8255 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
8256 gen_load_fpr32(fp
, fs
);
8257 gen_load_fpr32h(fph
, fs
);
8258 gen_store_fpr32(fp
, fd
);
8259 gen_store_fpr32h(fph
, fd
);
8262 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
8264 #ifdef TARGET_WORDS_BIGENDIAN
8265 gen_load_fpr32(fp
, fs
);
8266 gen_load_fpr32h(fph
, ft
);
8267 gen_store_fpr32h(fp
, fd
);
8268 gen_store_fpr32(fph
, fd
);
8270 gen_load_fpr32h(fph
, fs
);
8271 gen_load_fpr32(fp
, ft
);
8272 gen_store_fpr32(fph
, fd
);
8273 gen_store_fpr32h(fp
, fd
);
8276 tcg_temp_free_i32(fp
);
8277 tcg_temp_free_i32(fph
);
8284 TCGv_i32 fp0
= tcg_temp_new_i32();
8285 TCGv_i32 fp1
= tcg_temp_new_i32();
8286 TCGv_i32 fp2
= tcg_temp_new_i32();
8288 gen_load_fpr32(fp0
, fs
);
8289 gen_load_fpr32(fp1
, ft
);
8290 gen_load_fpr32(fp2
, fr
);
8291 gen_helper_float_muladd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8292 tcg_temp_free_i32(fp0
);
8293 tcg_temp_free_i32(fp1
);
8294 gen_store_fpr32(fp2
, fd
);
8295 tcg_temp_free_i32(fp2
);
8301 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8303 TCGv_i64 fp0
= tcg_temp_new_i64();
8304 TCGv_i64 fp1
= tcg_temp_new_i64();
8305 TCGv_i64 fp2
= tcg_temp_new_i64();
8307 gen_load_fpr64(ctx
, fp0
, fs
);
8308 gen_load_fpr64(ctx
, fp1
, ft
);
8309 gen_load_fpr64(ctx
, fp2
, fr
);
8310 gen_helper_float_muladd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8311 tcg_temp_free_i64(fp0
);
8312 tcg_temp_free_i64(fp1
);
8313 gen_store_fpr64(ctx
, fp2
, fd
);
8314 tcg_temp_free_i64(fp2
);
8319 check_cp1_64bitmode(ctx
);
8321 TCGv_i64 fp0
= tcg_temp_new_i64();
8322 TCGv_i64 fp1
= tcg_temp_new_i64();
8323 TCGv_i64 fp2
= tcg_temp_new_i64();
8325 gen_load_fpr64(ctx
, fp0
, fs
);
8326 gen_load_fpr64(ctx
, fp1
, ft
);
8327 gen_load_fpr64(ctx
, fp2
, fr
);
8328 gen_helper_float_muladd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8329 tcg_temp_free_i64(fp0
);
8330 tcg_temp_free_i64(fp1
);
8331 gen_store_fpr64(ctx
, fp2
, fd
);
8332 tcg_temp_free_i64(fp2
);
8339 TCGv_i32 fp0
= tcg_temp_new_i32();
8340 TCGv_i32 fp1
= tcg_temp_new_i32();
8341 TCGv_i32 fp2
= tcg_temp_new_i32();
8343 gen_load_fpr32(fp0
, fs
);
8344 gen_load_fpr32(fp1
, ft
);
8345 gen_load_fpr32(fp2
, fr
);
8346 gen_helper_float_mulsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8347 tcg_temp_free_i32(fp0
);
8348 tcg_temp_free_i32(fp1
);
8349 gen_store_fpr32(fp2
, fd
);
8350 tcg_temp_free_i32(fp2
);
8356 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8358 TCGv_i64 fp0
= tcg_temp_new_i64();
8359 TCGv_i64 fp1
= tcg_temp_new_i64();
8360 TCGv_i64 fp2
= tcg_temp_new_i64();
8362 gen_load_fpr64(ctx
, fp0
, fs
);
8363 gen_load_fpr64(ctx
, fp1
, ft
);
8364 gen_load_fpr64(ctx
, fp2
, fr
);
8365 gen_helper_float_mulsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8366 tcg_temp_free_i64(fp0
);
8367 tcg_temp_free_i64(fp1
);
8368 gen_store_fpr64(ctx
, fp2
, fd
);
8369 tcg_temp_free_i64(fp2
);
8374 check_cp1_64bitmode(ctx
);
8376 TCGv_i64 fp0
= tcg_temp_new_i64();
8377 TCGv_i64 fp1
= tcg_temp_new_i64();
8378 TCGv_i64 fp2
= tcg_temp_new_i64();
8380 gen_load_fpr64(ctx
, fp0
, fs
);
8381 gen_load_fpr64(ctx
, fp1
, ft
);
8382 gen_load_fpr64(ctx
, fp2
, fr
);
8383 gen_helper_float_mulsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8384 tcg_temp_free_i64(fp0
);
8385 tcg_temp_free_i64(fp1
);
8386 gen_store_fpr64(ctx
, fp2
, fd
);
8387 tcg_temp_free_i64(fp2
);
8394 TCGv_i32 fp0
= tcg_temp_new_i32();
8395 TCGv_i32 fp1
= tcg_temp_new_i32();
8396 TCGv_i32 fp2
= tcg_temp_new_i32();
8398 gen_load_fpr32(fp0
, fs
);
8399 gen_load_fpr32(fp1
, ft
);
8400 gen_load_fpr32(fp2
, fr
);
8401 gen_helper_float_nmuladd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8402 tcg_temp_free_i32(fp0
);
8403 tcg_temp_free_i32(fp1
);
8404 gen_store_fpr32(fp2
, fd
);
8405 tcg_temp_free_i32(fp2
);
8411 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8413 TCGv_i64 fp0
= tcg_temp_new_i64();
8414 TCGv_i64 fp1
= tcg_temp_new_i64();
8415 TCGv_i64 fp2
= tcg_temp_new_i64();
8417 gen_load_fpr64(ctx
, fp0
, fs
);
8418 gen_load_fpr64(ctx
, fp1
, ft
);
8419 gen_load_fpr64(ctx
, fp2
, fr
);
8420 gen_helper_float_nmuladd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8421 tcg_temp_free_i64(fp0
);
8422 tcg_temp_free_i64(fp1
);
8423 gen_store_fpr64(ctx
, fp2
, fd
);
8424 tcg_temp_free_i64(fp2
);
8429 check_cp1_64bitmode(ctx
);
8431 TCGv_i64 fp0
= tcg_temp_new_i64();
8432 TCGv_i64 fp1
= tcg_temp_new_i64();
8433 TCGv_i64 fp2
= tcg_temp_new_i64();
8435 gen_load_fpr64(ctx
, fp0
, fs
);
8436 gen_load_fpr64(ctx
, fp1
, ft
);
8437 gen_load_fpr64(ctx
, fp2
, fr
);
8438 gen_helper_float_nmuladd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8439 tcg_temp_free_i64(fp0
);
8440 tcg_temp_free_i64(fp1
);
8441 gen_store_fpr64(ctx
, fp2
, fd
);
8442 tcg_temp_free_i64(fp2
);
8449 TCGv_i32 fp0
= tcg_temp_new_i32();
8450 TCGv_i32 fp1
= tcg_temp_new_i32();
8451 TCGv_i32 fp2
= tcg_temp_new_i32();
8453 gen_load_fpr32(fp0
, fs
);
8454 gen_load_fpr32(fp1
, ft
);
8455 gen_load_fpr32(fp2
, fr
);
8456 gen_helper_float_nmulsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8457 tcg_temp_free_i32(fp0
);
8458 tcg_temp_free_i32(fp1
);
8459 gen_store_fpr32(fp2
, fd
);
8460 tcg_temp_free_i32(fp2
);
8466 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8468 TCGv_i64 fp0
= tcg_temp_new_i64();
8469 TCGv_i64 fp1
= tcg_temp_new_i64();
8470 TCGv_i64 fp2
= tcg_temp_new_i64();
8472 gen_load_fpr64(ctx
, fp0
, fs
);
8473 gen_load_fpr64(ctx
, fp1
, ft
);
8474 gen_load_fpr64(ctx
, fp2
, fr
);
8475 gen_helper_float_nmulsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8476 tcg_temp_free_i64(fp0
);
8477 tcg_temp_free_i64(fp1
);
8478 gen_store_fpr64(ctx
, fp2
, fd
);
8479 tcg_temp_free_i64(fp2
);
8484 check_cp1_64bitmode(ctx
);
8486 TCGv_i64 fp0
= tcg_temp_new_i64();
8487 TCGv_i64 fp1
= tcg_temp_new_i64();
8488 TCGv_i64 fp2
= tcg_temp_new_i64();
8490 gen_load_fpr64(ctx
, fp0
, fs
);
8491 gen_load_fpr64(ctx
, fp1
, ft
);
8492 gen_load_fpr64(ctx
, fp2
, fr
);
8493 gen_helper_float_nmulsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8494 tcg_temp_free_i64(fp0
);
8495 tcg_temp_free_i64(fp1
);
8496 gen_store_fpr64(ctx
, fp2
, fd
);
8497 tcg_temp_free_i64(fp2
);
8503 generate_exception (ctx
, EXCP_RI
);
8506 (void)opn
; /* avoid a compiler warning */
8507 MIPS_DEBUG("%s %s, %s, %s, %s", opn
, fregnames
[fd
], fregnames
[fr
],
8508 fregnames
[fs
], fregnames
[ft
]);
8512 gen_rdhwr (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
)
8516 #if !defined(CONFIG_USER_ONLY)
8517 /* The Linux kernel will emulate rdhwr if it's not supported natively.
8518 Therefore only check the ISA in system mode. */
8519 check_insn(env
, ctx
, ISA_MIPS32R2
);
8521 t0
= tcg_temp_new();
8525 save_cpu_state(ctx
, 1);
8526 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
8527 gen_store_gpr(t0
, rt
);
8530 save_cpu_state(ctx
, 1);
8531 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
8532 gen_store_gpr(t0
, rt
);
8535 save_cpu_state(ctx
, 1);
8536 gen_helper_rdhwr_cc(t0
, cpu_env
);
8537 gen_store_gpr(t0
, rt
);
8540 save_cpu_state(ctx
, 1);
8541 gen_helper_rdhwr_ccres(t0
, cpu_env
);
8542 gen_store_gpr(t0
, rt
);
8545 #if defined(CONFIG_USER_ONLY)
8546 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUMIPSState
, tls_value
));
8547 gen_store_gpr(t0
, rt
);
8550 /* XXX: Some CPUs implement this in hardware.
8551 Not supported yet. */
8553 default: /* Invalid */
8554 MIPS_INVAL("rdhwr");
8555 generate_exception(ctx
, EXCP_RI
);
8561 static void handle_delay_slot (CPUMIPSState
*env
, DisasContext
*ctx
,
8564 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8565 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
8566 /* Branches completion */
8567 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
8568 ctx
->bstate
= BS_BRANCH
;
8569 save_cpu_state(ctx
, 0);
8570 /* FIXME: Need to clear can_do_io. */
8571 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
8573 /* unconditional branch */
8574 MIPS_DEBUG("unconditional branch");
8575 if (proc_hflags
& MIPS_HFLAG_BX
) {
8576 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
8578 gen_goto_tb(ctx
, 0, ctx
->btarget
);
8581 /* blikely taken case */
8582 MIPS_DEBUG("blikely branch taken");
8583 gen_goto_tb(ctx
, 0, ctx
->btarget
);
8586 /* Conditional branch */
8587 MIPS_DEBUG("conditional branch");
8589 int l1
= gen_new_label();
8591 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
8592 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
8594 gen_goto_tb(ctx
, 0, ctx
->btarget
);
8598 /* unconditional branch to register */
8599 MIPS_DEBUG("branch to register");
8600 if (env
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
8601 TCGv t0
= tcg_temp_new();
8602 TCGv_i32 t1
= tcg_temp_new_i32();
8604 tcg_gen_andi_tl(t0
, btarget
, 0x1);
8605 tcg_gen_trunc_tl_i32(t1
, t0
);
8607 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
8608 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
8609 tcg_gen_or_i32(hflags
, hflags
, t1
);
8610 tcg_temp_free_i32(t1
);
8612 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
8614 tcg_gen_mov_tl(cpu_PC
, btarget
);
8616 if (ctx
->singlestep_enabled
) {
8617 save_cpu_state(ctx
, 0);
8618 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
8623 MIPS_DEBUG("unknown branch");
8629 /* ISA extensions (ASEs) */
8630 /* MIPS16 extension to MIPS32 */
8632 /* MIPS16 major opcodes */
8634 M16_OPC_ADDIUSP
= 0x00,
8635 M16_OPC_ADDIUPC
= 0x01,
8638 M16_OPC_BEQZ
= 0x04,
8639 M16_OPC_BNEQZ
= 0x05,
8640 M16_OPC_SHIFT
= 0x06,
8642 M16_OPC_RRIA
= 0x08,
8643 M16_OPC_ADDIU8
= 0x09,
8644 M16_OPC_SLTI
= 0x0a,
8645 M16_OPC_SLTIU
= 0x0b,
8648 M16_OPC_CMPI
= 0x0e,
8652 M16_OPC_LWSP
= 0x12,
8656 M16_OPC_LWPC
= 0x16,
8660 M16_OPC_SWSP
= 0x1a,
8664 M16_OPC_EXTEND
= 0x1e,
8668 /* I8 funct field */
8687 /* RR funct field */
8721 /* I64 funct field */
8733 /* RR ry field for CNVT */
8735 RR_RY_CNVT_ZEB
= 0x0,
8736 RR_RY_CNVT_ZEH
= 0x1,
8737 RR_RY_CNVT_ZEW
= 0x2,
8738 RR_RY_CNVT_SEB
= 0x4,
8739 RR_RY_CNVT_SEH
= 0x5,
8740 RR_RY_CNVT_SEW
= 0x6,
/*
 * Map a 3-bit MIPS16 register field to the architectural register
 * number: fields 0 and 1 select $16/$17 (s0/s1), fields 2..7 select
 * $2..$7 directly.
 */
static int xlat (int r)
{
    /* const: the table is read-only; matches the mmreg()/mmreg2()
       lookup tables used by the microMIPS decoder. */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
8750 static void gen_mips16_save (DisasContext
*ctx
,
8751 int xsregs
, int aregs
,
8752 int do_ra
, int do_s0
, int do_s1
,
8755 TCGv t0
= tcg_temp_new();
8756 TCGv t1
= tcg_temp_new();
8786 generate_exception(ctx
, EXCP_RI
);
8792 gen_base_offset_addr(ctx
, t0
, 29, 12);
8793 gen_load_gpr(t1
, 7);
8794 op_st_sw(t1
, t0
, ctx
);
8797 gen_base_offset_addr(ctx
, t0
, 29, 8);
8798 gen_load_gpr(t1
, 6);
8799 op_st_sw(t1
, t0
, ctx
);
8802 gen_base_offset_addr(ctx
, t0
, 29, 4);
8803 gen_load_gpr(t1
, 5);
8804 op_st_sw(t1
, t0
, ctx
);
8807 gen_base_offset_addr(ctx
, t0
, 29, 0);
8808 gen_load_gpr(t1
, 4);
8809 op_st_sw(t1
, t0
, ctx
);
8812 gen_load_gpr(t0
, 29);
8814 #define DECR_AND_STORE(reg) do { \
8815 tcg_gen_subi_tl(t0, t0, 4); \
8816 gen_load_gpr(t1, reg); \
8817 op_st_sw(t1, t0, ctx); \
8881 generate_exception(ctx
, EXCP_RI
);
8897 #undef DECR_AND_STORE
8899 tcg_gen_subi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
8904 static void gen_mips16_restore (DisasContext
*ctx
,
8905 int xsregs
, int aregs
,
8906 int do_ra
, int do_s0
, int do_s1
,
8910 TCGv t0
= tcg_temp_new();
8911 TCGv t1
= tcg_temp_new();
8913 tcg_gen_addi_tl(t0
, cpu_gpr
[29], framesize
);
8915 #define DECR_AND_LOAD(reg) do { \
8916 tcg_gen_subi_tl(t0, t0, 4); \
8917 op_ld_lw(t1, t0, ctx); \
8918 gen_store_gpr(t1, reg); \
8982 generate_exception(ctx
, EXCP_RI
);
8998 #undef DECR_AND_LOAD
9000 tcg_gen_addi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
9005 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
9006 int is_64_bit
, int extended
)
9010 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9011 generate_exception(ctx
, EXCP_RI
);
9015 t0
= tcg_temp_new();
9017 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
9018 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
9020 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9026 #if defined(TARGET_MIPS64)
9027 static void decode_i64_mips16 (CPUMIPSState
*env
, DisasContext
*ctx
,
9028 int ry
, int funct
, int16_t offset
,
9034 offset
= extended
? offset
: offset
<< 3;
9035 gen_ld(env
, ctx
, OPC_LD
, ry
, 29, offset
);
9039 offset
= extended
? offset
: offset
<< 3;
9040 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
9044 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
9045 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
9049 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
9050 gen_arith_imm(env
, ctx
, OPC_DADDIU
, 29, 29, offset
);
9053 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9054 generate_exception(ctx
, EXCP_RI
);
9056 offset
= extended
? offset
: offset
<< 3;
9057 gen_ld(env
, ctx
, OPC_LDPC
, ry
, 0, offset
);
9062 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
9063 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, ry
, offset
);
9067 offset
= extended
? offset
: offset
<< 2;
9068 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
9072 offset
= extended
? offset
: offset
<< 2;
9073 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, 29, offset
);
9079 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
,
9082 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
9083 int op
, rx
, ry
, funct
, sa
;
9084 int16_t imm
, offset
;
9086 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
9087 op
= (ctx
->opcode
>> 11) & 0x1f;
9088 sa
= (ctx
->opcode
>> 22) & 0x1f;
9089 funct
= (ctx
->opcode
>> 8) & 0x7;
9090 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
9091 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
9092 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
9093 | ((ctx
->opcode
>> 21) & 0x3f) << 5
9094 | (ctx
->opcode
& 0x1f));
9096 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
9099 case M16_OPC_ADDIUSP
:
9100 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
9102 case M16_OPC_ADDIUPC
:
9103 gen_addiupc(ctx
, rx
, imm
, 0, 1);
9106 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1);
9107 /* No delay slot, so just process as a normal instruction */
9110 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1);
9111 /* No delay slot, so just process as a normal instruction */
9114 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1);
9115 /* No delay slot, so just process as a normal instruction */
9118 switch (ctx
->opcode
& 0x3) {
9120 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
9123 #if defined(TARGET_MIPS64)
9125 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
9127 generate_exception(ctx
, EXCP_RI
);
9131 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
9134 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
9138 #if defined(TARGET_MIPS64)
9141 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
);
9145 imm
= ctx
->opcode
& 0xf;
9146 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
9147 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
9148 imm
= (int16_t) (imm
<< 1) >> 1;
9149 if ((ctx
->opcode
>> 4) & 0x1) {
9150 #if defined(TARGET_MIPS64)
9152 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
9154 generate_exception(ctx
, EXCP_RI
);
9157 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
9160 case M16_OPC_ADDIU8
:
9161 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
9164 gen_slt_imm(env
, ctx
, OPC_SLTI
, 24, rx
, imm
);
9167 gen_slt_imm(env
, ctx
, OPC_SLTIU
, 24, rx
, imm
);
9172 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1);
9175 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1);
9178 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
9181 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
);
9185 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
9186 int aregs
= (ctx
->opcode
>> 16) & 0xf;
9187 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
9188 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
9189 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
9190 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
9191 | (ctx
->opcode
& 0xf)) << 3;
9193 if (ctx
->opcode
& (1 << 7)) {
9194 gen_mips16_save(ctx
, xsregs
, aregs
,
9195 do_ra
, do_s0
, do_s1
,
9198 gen_mips16_restore(ctx
, xsregs
, aregs
,
9199 do_ra
, do_s0
, do_s1
,
9205 generate_exception(ctx
, EXCP_RI
);
9210 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
9213 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
9215 #if defined(TARGET_MIPS64)
9217 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
9221 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
9224 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
);
9227 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, offset
);
9230 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
);
9233 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
9236 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
);
9239 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, offset
);
9241 #if defined(TARGET_MIPS64)
9243 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
);
9247 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
9250 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
9253 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
9256 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
9258 #if defined(TARGET_MIPS64)
9260 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 1);
9264 generate_exception(ctx
, EXCP_RI
);
9271 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
,
9276 int op
, cnvt_op
, op1
, offset
;
9280 op
= (ctx
->opcode
>> 11) & 0x1f;
9281 sa
= (ctx
->opcode
>> 2) & 0x7;
9282 sa
= sa
== 0 ? 8 : sa
;
9283 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
9284 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
9285 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
9286 op1
= offset
= ctx
->opcode
& 0x1f;
9291 case M16_OPC_ADDIUSP
:
9293 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
9295 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
9298 case M16_OPC_ADDIUPC
:
9299 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
9302 offset
= (ctx
->opcode
& 0x7ff) << 1;
9303 offset
= (int16_t)(offset
<< 4) >> 4;
9304 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
);
9305 /* No delay slot, so just process as a normal instruction */
9308 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
9309 offset
= (((ctx
->opcode
& 0x1f) << 21)
9310 | ((ctx
->opcode
>> 5) & 0x1f) << 16
9312 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALXS
: OPC_JALS
;
9313 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
);
9318 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
9319 /* No delay slot, so just process as a normal instruction */
9322 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
9323 /* No delay slot, so just process as a normal instruction */
9326 switch (ctx
->opcode
& 0x3) {
9328 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
9331 #if defined(TARGET_MIPS64)
9333 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
9335 generate_exception(ctx
, EXCP_RI
);
9339 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
9342 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
9346 #if defined(TARGET_MIPS64)
9349 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
9354 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
9356 if ((ctx
->opcode
>> 4) & 1) {
9357 #if defined(TARGET_MIPS64)
9359 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
9361 generate_exception(ctx
, EXCP_RI
);
9364 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
9368 case M16_OPC_ADDIU8
:
9370 int16_t imm
= (int8_t) ctx
->opcode
;
9372 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
9377 int16_t imm
= (uint8_t) ctx
->opcode
;
9378 gen_slt_imm(env
, ctx
, OPC_SLTI
, 24, rx
, imm
);
9383 int16_t imm
= (uint8_t) ctx
->opcode
;
9384 gen_slt_imm(env
, ctx
, OPC_SLTIU
, 24, rx
, imm
);
9391 funct
= (ctx
->opcode
>> 8) & 0x7;
9394 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
9395 ((int8_t)ctx
->opcode
) << 1);
9398 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
9399 ((int8_t)ctx
->opcode
) << 1);
9402 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
9405 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29,
9406 ((int8_t)ctx
->opcode
) << 3);
9410 int do_ra
= ctx
->opcode
& (1 << 6);
9411 int do_s0
= ctx
->opcode
& (1 << 5);
9412 int do_s1
= ctx
->opcode
& (1 << 4);
9413 int framesize
= ctx
->opcode
& 0xf;
9415 if (framesize
== 0) {
9418 framesize
= framesize
<< 3;
9421 if (ctx
->opcode
& (1 << 7)) {
9422 gen_mips16_save(ctx
, 0, 0,
9423 do_ra
, do_s0
, do_s1
, framesize
);
9425 gen_mips16_restore(ctx
, 0, 0,
9426 do_ra
, do_s0
, do_s1
, framesize
);
9432 int rz
= xlat(ctx
->opcode
& 0x7);
9434 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
9435 ((ctx
->opcode
>> 5) & 0x7);
9436 gen_arith(env
, ctx
, OPC_ADDU
, reg32
, rz
, 0);
9440 reg32
= ctx
->opcode
& 0x1f;
9441 gen_arith(env
, ctx
, OPC_ADDU
, ry
, reg32
, 0);
9444 generate_exception(ctx
, EXCP_RI
);
9451 int16_t imm
= (uint8_t) ctx
->opcode
;
9453 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 0, imm
);
9458 int16_t imm
= (uint8_t) ctx
->opcode
;
9459 gen_logic_imm(env
, ctx
, OPC_XORI
, 24, rx
, imm
);
9462 #if defined(TARGET_MIPS64)
9465 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
9469 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
9472 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
9475 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9478 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
9481 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
9484 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
9487 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
9489 #if defined (TARGET_MIPS64)
9492 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
9496 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
9499 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
9502 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9505 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
9509 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
9512 switch (ctx
->opcode
& 0x3) {
9514 mips32_op
= OPC_ADDU
;
9517 mips32_op
= OPC_SUBU
;
9519 #if defined(TARGET_MIPS64)
9521 mips32_op
= OPC_DADDU
;
9525 mips32_op
= OPC_DSUBU
;
9530 generate_exception(ctx
, EXCP_RI
);
9534 gen_arith(env
, ctx
, mips32_op
, rz
, rx
, ry
);
9543 int nd
= (ctx
->opcode
>> 7) & 0x1;
9544 int link
= (ctx
->opcode
>> 6) & 0x1;
9545 int ra
= (ctx
->opcode
>> 5) & 0x1;
9548 op
= nd
? OPC_JALRC
: OPC_JALRS
;
9553 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0);
9560 /* XXX: not clear which exception should be raised
9561 * when in debug mode...
9563 check_insn(env
, ctx
, ISA_MIPS32
);
9564 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9565 generate_exception(ctx
, EXCP_DBp
);
9567 generate_exception(ctx
, EXCP_DBp
);
9571 gen_slt(env
, ctx
, OPC_SLT
, 24, rx
, ry
);
9574 gen_slt(env
, ctx
, OPC_SLTU
, 24, rx
, ry
);
9577 generate_exception(ctx
, EXCP_BREAK
);
9580 gen_shift(env
, ctx
, OPC_SLLV
, ry
, rx
, ry
);
9583 gen_shift(env
, ctx
, OPC_SRLV
, ry
, rx
, ry
);
9586 gen_shift(env
, ctx
, OPC_SRAV
, ry
, rx
, ry
);
9588 #if defined (TARGET_MIPS64)
9591 gen_shift_imm(env
, ctx
, OPC_DSRL
, ry
, ry
, sa
);
9595 gen_logic(env
, ctx
, OPC_XOR
, 24, rx
, ry
);
9598 gen_arith(env
, ctx
, OPC_SUBU
, rx
, 0, ry
);
9601 gen_logic(env
, ctx
, OPC_AND
, rx
, rx
, ry
);
9604 gen_logic(env
, ctx
, OPC_OR
, rx
, rx
, ry
);
9607 gen_logic(env
, ctx
, OPC_XOR
, rx
, rx
, ry
);
9610 gen_logic(env
, ctx
, OPC_NOR
, rx
, ry
, 0);
9613 gen_HILO(ctx
, OPC_MFHI
, rx
);
9617 case RR_RY_CNVT_ZEB
:
9618 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9620 case RR_RY_CNVT_ZEH
:
9621 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9623 case RR_RY_CNVT_SEB
:
9624 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9626 case RR_RY_CNVT_SEH
:
9627 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9629 #if defined (TARGET_MIPS64)
9630 case RR_RY_CNVT_ZEW
:
9632 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9634 case RR_RY_CNVT_SEW
:
9636 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9640 generate_exception(ctx
, EXCP_RI
);
9645 gen_HILO(ctx
, OPC_MFLO
, rx
);
9647 #if defined (TARGET_MIPS64)
9650 gen_shift_imm(env
, ctx
, OPC_DSRA
, ry
, ry
, sa
);
9654 gen_shift(env
, ctx
, OPC_DSLLV
, ry
, rx
, ry
);
9658 gen_shift(env
, ctx
, OPC_DSRLV
, ry
, rx
, ry
);
9662 gen_shift(env
, ctx
, OPC_DSRAV
, ry
, rx
, ry
);
9666 gen_muldiv(ctx
, OPC_MULT
, rx
, ry
);
9669 gen_muldiv(ctx
, OPC_MULTU
, rx
, ry
);
9672 gen_muldiv(ctx
, OPC_DIV
, rx
, ry
);
9675 gen_muldiv(ctx
, OPC_DIVU
, rx
, ry
);
9677 #if defined (TARGET_MIPS64)
9680 gen_muldiv(ctx
, OPC_DMULT
, rx
, ry
);
9684 gen_muldiv(ctx
, OPC_DMULTU
, rx
, ry
);
9688 gen_muldiv(ctx
, OPC_DDIV
, rx
, ry
);
9692 gen_muldiv(ctx
, OPC_DDIVU
, rx
, ry
);
9696 generate_exception(ctx
, EXCP_RI
);
9700 case M16_OPC_EXTEND
:
9701 decode_extended_mips16_opc(env
, ctx
, is_branch
);
9704 #if defined(TARGET_MIPS64)
9706 funct
= (ctx
->opcode
>> 8) & 0x7;
9707 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 0);
9711 generate_exception(ctx
, EXCP_RI
);
9718 /* microMIPS extension to MIPS32 */
9720 /* microMIPS32 major opcodes */
9759 /* 0x20 is reserved */
9769 /* 0x28 and 0x29 are reserved */
9779 /* 0x30 and 0x31 are reserved */
9789 /* 0x38 and 0x39 are reserved */
9800 /* POOL32A encoding of minor opcode field */
9803 /* These opcodes are distinguished only by bits 9..6; those bits are
9804 * what are recorded below. */
9830 /* The following can be distinguished by their lower 6 bits. */
9836 /* POOL32AXF encoding of minor opcode field extension */
9850 /* bits 13..12 for 0x01 */
9856 /* bits 13..12 for 0x2a */
9862 /* bits 13..12 for 0x32 */
9866 /* bits 15..12 for 0x2c */
9882 /* bits 15..12 for 0x34 */
9890 /* bits 15..12 for 0x3c */
9892 JR
= 0x0, /* alias */
9897 /* bits 15..12 for 0x05 */
9901 /* bits 15..12 for 0x0d */
9911 /* bits 15..12 for 0x15 */
9917 /* bits 15..12 for 0x1d */
9921 /* bits 15..12 for 0x2d */
9926 /* bits 15..12 for 0x35 */
9933 /* POOL32B encoding of minor opcode field (bits 15..12) */
9949 /* POOL32C encoding of minor opcode field (bits 15..12) */
9957 /* 0xa is reserved */
9964 /* 0x6 is reserved */
9970 /* POOL32F encoding of minor opcode field (bits 5..0) */
9973 /* These are the bit 7..6 values */
9984 /* These are the bit 8..6 values */
10028 CABS_COND_FMT
= 0x1c, /* MIPS3D */
10032 /* POOL32Fxf encoding of minor opcode extension field */
10070 /* POOL32I encoding of minor opcode field (bits 25..21) */
10095 /* These overlap and are distinguished by bit16 of the instruction */
10104 /* POOL16A encoding of minor opcode field */
10111 /* POOL16B encoding of minor opcode field */
10118 /* POOL16C encoding of minor opcode field */
10138 /* POOL16D encoding of minor opcode field */
10145 /* POOL16E encoding of minor opcode field */
/*
 * Map a 3-bit microMIPS register field to the architectural register:
 * 0 -> $16 (s0), 1 -> $17 (s1), 2..7 -> $2..$7.
 */
static int mmreg (int r)
{
    /* Fields 0 and 1 alias s0/s1; the remaining fields map to
       themselves, so no lookup table is needed. */
    return (r < 2) ? 16 + r : r;
}
10159 /* Used for 16-bit store instructions. */
/*
 * Register-field mapping used by 16-bit microMIPS store instructions:
 * 0 -> $0 (zero), 1 -> $17 (s1), 2..7 -> $2..$7.
 */
static int mmreg2 (int r)
{
    /* Only field 1 is remapped (to s1); everything else, including
       field 0 (the zero register), maps to itself. */
    return (r == 1) ? 17 : r;
}
10167 #define uMIPS_RD(op) ((op >> 7) & 0x7)
10168 #define uMIPS_RS(op) ((op >> 4) & 0x7)
10169 #define uMIPS_RS2(op) uMIPS_RS(op)
10170 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
10171 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
10172 #define uMIPS_RS5(op) (op & 0x1f)
10174 /* Signed immediate */
10175 #define SIMM(op, start, width) \
10176 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
10179 /* Zero-extended immediate */
10180 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
10182 static void gen_addiur1sp (CPUMIPSState
*env
, DisasContext
*ctx
)
10184 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
10186 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
10189 static void gen_addiur2 (CPUMIPSState
*env
, DisasContext
*ctx
)
10191 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
10192 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
10193 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
10195 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
10198 static void gen_addiusp (CPUMIPSState
*env
, DisasContext
*ctx
)
10200 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
10203 if (encoded
<= 1) {
10204 decoded
= 256 + encoded
;
10205 } else if (encoded
<= 255) {
10207 } else if (encoded
<= 509) {
10208 decoded
= encoded
- 512;
10210 decoded
= encoded
- 768;
10213 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
10216 static void gen_addius5 (CPUMIPSState
*env
, DisasContext
*ctx
)
10218 int imm
= SIMM(ctx
->opcode
, 1, 4);
10219 int rd
= (ctx
->opcode
>> 5) & 0x1f;
10221 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rd
, imm
);
10224 static void gen_andi16 (CPUMIPSState
*env
, DisasContext
*ctx
)
10226 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
10227 31, 32, 63, 64, 255, 32768, 65535 };
10228 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
10229 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
10230 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
10232 gen_logic_imm(env
, ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
10235 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
10236 int base
, int16_t offset
)
10238 const char *opn
= "ldst_multiple";
10242 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10243 generate_exception(ctx
, EXCP_RI
);
10247 t0
= tcg_temp_new();
10249 gen_base_offset_addr(ctx
, t0
, base
, offset
);
10251 t1
= tcg_const_tl(reglist
);
10252 t2
= tcg_const_i32(ctx
->mem_idx
);
10254 save_cpu_state(ctx
, 1);
10257 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
10261 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
10264 #ifdef TARGET_MIPS64
10266 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
10270 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
10276 MIPS_DEBUG("%s, %x, %d(%s)", opn
, reglist
, offset
, regnames
[base
]);
10279 tcg_temp_free_i32(t2
);
10283 static void gen_pool16c_insn (CPUMIPSState
*env
, DisasContext
*ctx
, int *is_branch
)
10285 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
10286 int rs
= mmreg(ctx
->opcode
& 0x7);
10289 switch (((ctx
->opcode
) >> 4) & 0x3f) {
10294 gen_logic(env
, ctx
, OPC_NOR
, rd
, rs
, 0);
10300 gen_logic(env
, ctx
, OPC_XOR
, rd
, rd
, rs
);
10306 gen_logic(env
, ctx
, OPC_AND
, rd
, rd
, rs
);
10312 gen_logic(env
, ctx
, OPC_OR
, rd
, rd
, rs
);
10319 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
10320 int offset
= ZIMM(ctx
->opcode
, 0, 4);
10322 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
10331 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
10332 int offset
= ZIMM(ctx
->opcode
, 0, 4);
10334 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
10341 int reg
= ctx
->opcode
& 0x1f;
10343 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0);
10350 int reg
= ctx
->opcode
& 0x1f;
10352 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0);
10353 /* Let normal delay slot handling in our caller take us
10354 to the branch target. */
10366 int reg
= ctx
->opcode
& 0x1f;
10368 gen_compute_branch(ctx
, opc
, 2, reg
, 31, 0);
10374 gen_HILO(ctx
, OPC_MFHI
, uMIPS_RS5(ctx
->opcode
));
10378 gen_HILO(ctx
, OPC_MFLO
, uMIPS_RS5(ctx
->opcode
));
10381 generate_exception(ctx
, EXCP_BREAK
);
10384 /* XXX: not clear which exception should be raised
10385 * when in debug mode...
10387 check_insn(env
, ctx
, ISA_MIPS32
);
10388 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10389 generate_exception(ctx
, EXCP_DBp
);
10391 generate_exception(ctx
, EXCP_DBp
);
10394 case JRADDIUSP
+ 0:
10395 case JRADDIUSP
+ 1:
10397 int imm
= ZIMM(ctx
->opcode
, 0, 5);
10399 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0);
10400 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
10401 /* Let normal delay slot handling in our caller take us
10402 to the branch target. */
10406 generate_exception(ctx
, EXCP_RI
);
10411 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
10413 TCGv t0
= tcg_temp_new();
10414 TCGv t1
= tcg_temp_new();
10416 gen_load_gpr(t0
, base
);
10419 gen_load_gpr(t1
, index
);
10420 tcg_gen_shli_tl(t1
, t1
, 2);
10421 gen_op_addr_add(ctx
, t0
, t1
, t0
);
10424 save_cpu_state(ctx
, 0);
10425 op_ld_lw(t1
, t0
, ctx
);
10426 gen_store_gpr(t1
, rd
);
10432 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
10433 int base
, int16_t offset
)
10435 const char *opn
= "ldst_pair";
10438 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
10439 generate_exception(ctx
, EXCP_RI
);
10443 t0
= tcg_temp_new();
10444 t1
= tcg_temp_new();
10446 gen_base_offset_addr(ctx
, t0
, base
, offset
);
10451 generate_exception(ctx
, EXCP_RI
);
10454 save_cpu_state(ctx
, 0);
10455 op_ld_lw(t1
, t0
, ctx
);
10456 gen_store_gpr(t1
, rd
);
10457 tcg_gen_movi_tl(t1
, 4);
10458 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10459 op_ld_lw(t1
, t0
, ctx
);
10460 gen_store_gpr(t1
, rd
+1);
10464 save_cpu_state(ctx
, 0);
10465 gen_load_gpr(t1
, rd
);
10466 op_st_sw(t1
, t0
, ctx
);
10467 tcg_gen_movi_tl(t1
, 4);
10468 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10469 gen_load_gpr(t1
, rd
+1);
10470 op_st_sw(t1
, t0
, ctx
);
10473 #ifdef TARGET_MIPS64
10476 generate_exception(ctx
, EXCP_RI
);
10479 save_cpu_state(ctx
, 0);
10480 op_ld_ld(t1
, t0
, ctx
);
10481 gen_store_gpr(t1
, rd
);
10482 tcg_gen_movi_tl(t1
, 8);
10483 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10484 op_ld_ld(t1
, t0
, ctx
);
10485 gen_store_gpr(t1
, rd
+1);
10489 save_cpu_state(ctx
, 0);
10490 gen_load_gpr(t1
, rd
);
10491 op_st_sd(t1
, t0
, ctx
);
10492 tcg_gen_movi_tl(t1
, 8);
10493 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10494 gen_load_gpr(t1
, rd
+1);
10495 op_st_sd(t1
, t0
, ctx
);
10500 (void)opn
; /* avoid a compiler warning */
10501 MIPS_DEBUG("%s, %s, %d(%s)", opn
, regnames
[rd
], offset
, regnames
[base
]);
10506 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
,
10509 int extension
= (ctx
->opcode
>> 6) & 0x3f;
10510 int minor
= (ctx
->opcode
>> 12) & 0xf;
10511 uint32_t mips32_op
;
10513 switch (extension
) {
10515 mips32_op
= OPC_TEQ
;
10518 mips32_op
= OPC_TGE
;
10521 mips32_op
= OPC_TGEU
;
10524 mips32_op
= OPC_TLT
;
10527 mips32_op
= OPC_TLTU
;
10530 mips32_op
= OPC_TNE
;
10532 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
10534 #ifndef CONFIG_USER_ONLY
10537 check_cp0_enabled(ctx
);
10539 /* Treat as NOP. */
10542 gen_mfc0(env
, ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
10546 check_cp0_enabled(ctx
);
10548 TCGv t0
= tcg_temp_new();
10550 gen_load_gpr(t0
, rt
);
10551 gen_mtc0(env
, ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
10559 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
10562 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
10565 mips32_op
= OPC_CLO
;
10568 mips32_op
= OPC_CLZ
;
10570 check_insn(env
, ctx
, ISA_MIPS32
);
10571 gen_cl(ctx
, mips32_op
, rt
, rs
);
10574 gen_rdhwr(env
, ctx
, rt
, rs
);
10577 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
10580 mips32_op
= OPC_MULT
;
10583 mips32_op
= OPC_MULTU
;
10586 mips32_op
= OPC_DIV
;
10589 mips32_op
= OPC_DIVU
;
10592 mips32_op
= OPC_MADD
;
10595 mips32_op
= OPC_MADDU
;
10598 mips32_op
= OPC_MSUB
;
10601 mips32_op
= OPC_MSUBU
;
10603 check_insn(env
, ctx
, ISA_MIPS32
);
10604 gen_muldiv(ctx
, mips32_op
, rs
, rt
);
10607 goto pool32axf_invalid
;
10618 generate_exception_err(ctx
, EXCP_CpU
, 2);
10621 goto pool32axf_invalid
;
10628 gen_compute_branch (ctx
, OPC_JALR
, 4, rs
, rt
, 0);
10633 gen_compute_branch (ctx
, OPC_JALRS
, 4, rs
, rt
, 0);
10637 goto pool32axf_invalid
;
10643 check_cp0_enabled(ctx
);
10644 check_insn(env
, ctx
, ISA_MIPS32R2
);
10645 gen_load_srsgpr(rt
, rs
);
10648 check_cp0_enabled(ctx
);
10649 check_insn(env
, ctx
, ISA_MIPS32R2
);
10650 gen_store_srsgpr(rt
, rs
);
10653 goto pool32axf_invalid
;
10656 #ifndef CONFIG_USER_ONLY
10660 mips32_op
= OPC_TLBP
;
10663 mips32_op
= OPC_TLBR
;
10666 mips32_op
= OPC_TLBWI
;
10669 mips32_op
= OPC_TLBWR
;
10672 mips32_op
= OPC_WAIT
;
10675 mips32_op
= OPC_DERET
;
10678 mips32_op
= OPC_ERET
;
10680 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
10683 goto pool32axf_invalid
;
10689 check_cp0_enabled(ctx
);
10691 TCGv t0
= tcg_temp_new();
10693 save_cpu_state(ctx
, 1);
10694 gen_helper_di(t0
, cpu_env
);
10695 gen_store_gpr(t0
, rs
);
10696 /* Stop translation as we may have switched the execution mode */
10697 ctx
->bstate
= BS_STOP
;
10702 check_cp0_enabled(ctx
);
10704 TCGv t0
= tcg_temp_new();
10706 save_cpu_state(ctx
, 1);
10707 gen_helper_ei(t0
, cpu_env
);
10708 gen_store_gpr(t0
, rs
);
10709 /* Stop translation as we may have switched the execution mode */
10710 ctx
->bstate
= BS_STOP
;
10715 goto pool32axf_invalid
;
10725 generate_exception(ctx
, EXCP_SYSCALL
);
10726 ctx
->bstate
= BS_STOP
;
10729 check_insn(env
, ctx
, ISA_MIPS32
);
10730 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10731 generate_exception(ctx
, EXCP_DBp
);
10733 generate_exception(ctx
, EXCP_DBp
);
10737 goto pool32axf_invalid
;
10743 gen_HILO(ctx
, OPC_MFHI
, rs
);
10746 gen_HILO(ctx
, OPC_MFLO
, rs
);
10749 gen_HILO(ctx
, OPC_MTHI
, rs
);
10752 gen_HILO(ctx
, OPC_MTLO
, rs
);
10755 goto pool32axf_invalid
;
10760 MIPS_INVAL("pool32axf");
10761 generate_exception(ctx
, EXCP_RI
);
10766 /* Values for microMIPS fmt field. Variable-width, depending on which
10767 formats the instruction supports. */
10786 static void gen_pool32fxf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
10788 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
10789 uint32_t mips32_op
;
10791 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
10792 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
10793 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
10795 switch (extension
) {
10796 case FLOAT_1BIT_FMT(CFC1
, 0):
10797 mips32_op
= OPC_CFC1
;
10799 case FLOAT_1BIT_FMT(CTC1
, 0):
10800 mips32_op
= OPC_CTC1
;
10802 case FLOAT_1BIT_FMT(MFC1
, 0):
10803 mips32_op
= OPC_MFC1
;
10805 case FLOAT_1BIT_FMT(MTC1
, 0):
10806 mips32_op
= OPC_MTC1
;
10808 case FLOAT_1BIT_FMT(MFHC1
, 0):
10809 mips32_op
= OPC_MFHC1
;
10811 case FLOAT_1BIT_FMT(MTHC1
, 0):
10812 mips32_op
= OPC_MTHC1
;
10814 gen_cp1(ctx
, mips32_op
, rt
, rs
);
10817 /* Reciprocal square root */
10818 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
10819 mips32_op
= OPC_RSQRT_S
;
10821 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
10822 mips32_op
= OPC_RSQRT_D
;
10826 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
10827 mips32_op
= OPC_SQRT_S
;
10829 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
10830 mips32_op
= OPC_SQRT_D
;
10834 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
10835 mips32_op
= OPC_RECIP_S
;
10837 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
10838 mips32_op
= OPC_RECIP_D
;
10842 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
10843 mips32_op
= OPC_FLOOR_L_S
;
10845 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
10846 mips32_op
= OPC_FLOOR_L_D
;
10848 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
10849 mips32_op
= OPC_FLOOR_W_S
;
10851 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
10852 mips32_op
= OPC_FLOOR_W_D
;
10856 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
10857 mips32_op
= OPC_CEIL_L_S
;
10859 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
10860 mips32_op
= OPC_CEIL_L_D
;
10862 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
10863 mips32_op
= OPC_CEIL_W_S
;
10865 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
10866 mips32_op
= OPC_CEIL_W_D
;
10870 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
10871 mips32_op
= OPC_TRUNC_L_S
;
10873 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
10874 mips32_op
= OPC_TRUNC_L_D
;
10876 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
10877 mips32_op
= OPC_TRUNC_W_S
;
10879 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
10880 mips32_op
= OPC_TRUNC_W_D
;
10884 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
10885 mips32_op
= OPC_ROUND_L_S
;
10887 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
10888 mips32_op
= OPC_ROUND_L_D
;
10890 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
10891 mips32_op
= OPC_ROUND_W_S
;
10893 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
10894 mips32_op
= OPC_ROUND_W_D
;
10897 /* Integer to floating-point conversion */
10898 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
10899 mips32_op
= OPC_CVT_L_S
;
10901 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
10902 mips32_op
= OPC_CVT_L_D
;
10904 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
10905 mips32_op
= OPC_CVT_W_S
;
10907 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
10908 mips32_op
= OPC_CVT_W_D
;
10911 /* Paired-foo conversions */
10912 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
10913 mips32_op
= OPC_CVT_S_PL
;
10915 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
10916 mips32_op
= OPC_CVT_S_PU
;
10918 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
10919 mips32_op
= OPC_CVT_PW_PS
;
10921 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
10922 mips32_op
= OPC_CVT_PS_PW
;
10925 /* Floating-point moves */
10926 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
10927 mips32_op
= OPC_MOV_S
;
10929 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
10930 mips32_op
= OPC_MOV_D
;
10932 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
10933 mips32_op
= OPC_MOV_PS
;
10936 /* Absolute value */
10937 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
10938 mips32_op
= OPC_ABS_S
;
10940 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
10941 mips32_op
= OPC_ABS_D
;
10943 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
10944 mips32_op
= OPC_ABS_PS
;
10948 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
10949 mips32_op
= OPC_NEG_S
;
10951 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
10952 mips32_op
= OPC_NEG_D
;
10954 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
10955 mips32_op
= OPC_NEG_PS
;
10958 /* Reciprocal square root step */
10959 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
10960 mips32_op
= OPC_RSQRT1_S
;
10962 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
10963 mips32_op
= OPC_RSQRT1_D
;
10965 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
10966 mips32_op
= OPC_RSQRT1_PS
;
10969 /* Reciprocal step */
10970 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
10971 mips32_op
= OPC_RECIP1_S
;
10973 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
10974 mips32_op
= OPC_RECIP1_S
;
10976 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
10977 mips32_op
= OPC_RECIP1_PS
;
10980 /* Conversions from double */
10981 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
10982 mips32_op
= OPC_CVT_D_S
;
10984 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
10985 mips32_op
= OPC_CVT_D_W
;
10987 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
10988 mips32_op
= OPC_CVT_D_L
;
10991 /* Conversions from single */
10992 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
10993 mips32_op
= OPC_CVT_S_D
;
10995 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
10996 mips32_op
= OPC_CVT_S_W
;
10998 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
10999 mips32_op
= OPC_CVT_S_L
;
11001 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
11004 /* Conditional moves on floating-point codes */
11005 case COND_FLOAT_MOV(MOVT
, 0):
11006 case COND_FLOAT_MOV(MOVT
, 1):
11007 case COND_FLOAT_MOV(MOVT
, 2):
11008 case COND_FLOAT_MOV(MOVT
, 3):
11009 case COND_FLOAT_MOV(MOVT
, 4):
11010 case COND_FLOAT_MOV(MOVT
, 5):
11011 case COND_FLOAT_MOV(MOVT
, 6):
11012 case COND_FLOAT_MOV(MOVT
, 7):
11013 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
11015 case COND_FLOAT_MOV(MOVF
, 0):
11016 case COND_FLOAT_MOV(MOVF
, 1):
11017 case COND_FLOAT_MOV(MOVF
, 2):
11018 case COND_FLOAT_MOV(MOVF
, 3):
11019 case COND_FLOAT_MOV(MOVF
, 4):
11020 case COND_FLOAT_MOV(MOVF
, 5):
11021 case COND_FLOAT_MOV(MOVF
, 6):
11022 case COND_FLOAT_MOV(MOVF
, 7):
11023 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
11026 MIPS_INVAL("pool32fxf");
11027 generate_exception(ctx
, EXCP_RI
);
11032 static void decode_micromips32_opc (CPUMIPSState
*env
, DisasContext
*ctx
,
11033 uint16_t insn_hw1
, int *is_branch
)
11037 int rt
, rs
, rd
, rr
;
11039 uint32_t op
, minor
, mips32_op
;
11040 uint32_t cond
, fmt
, cc
;
11042 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11043 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
11045 rt
= (ctx
->opcode
>> 21) & 0x1f;
11046 rs
= (ctx
->opcode
>> 16) & 0x1f;
11047 rd
= (ctx
->opcode
>> 11) & 0x1f;
11048 rr
= (ctx
->opcode
>> 6) & 0x1f;
11049 imm
= (int16_t) ctx
->opcode
;
11051 op
= (ctx
->opcode
>> 26) & 0x3f;
11054 minor
= ctx
->opcode
& 0x3f;
11057 minor
= (ctx
->opcode
>> 6) & 0xf;
11060 mips32_op
= OPC_SLL
;
11063 mips32_op
= OPC_SRA
;
11066 mips32_op
= OPC_SRL
;
11069 mips32_op
= OPC_ROTR
;
11071 gen_shift_imm(env
, ctx
, mips32_op
, rt
, rs
, rd
);
11074 goto pool32a_invalid
;
11078 minor
= (ctx
->opcode
>> 6) & 0xf;
11082 mips32_op
= OPC_ADD
;
11085 mips32_op
= OPC_ADDU
;
11088 mips32_op
= OPC_SUB
;
11091 mips32_op
= OPC_SUBU
;
11094 mips32_op
= OPC_MUL
;
11096 gen_arith(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11100 mips32_op
= OPC_SLLV
;
11103 mips32_op
= OPC_SRLV
;
11106 mips32_op
= OPC_SRAV
;
11109 mips32_op
= OPC_ROTRV
;
11111 gen_shift(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11113 /* Logical operations */
11115 mips32_op
= OPC_AND
;
11118 mips32_op
= OPC_OR
;
11121 mips32_op
= OPC_NOR
;
11124 mips32_op
= OPC_XOR
;
11126 gen_logic(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11128 /* Set less than */
11130 mips32_op
= OPC_SLT
;
11133 mips32_op
= OPC_SLTU
;
11135 gen_slt(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11138 goto pool32a_invalid
;
11142 minor
= (ctx
->opcode
>> 6) & 0xf;
11144 /* Conditional moves */
11146 mips32_op
= OPC_MOVN
;
11149 mips32_op
= OPC_MOVZ
;
11151 gen_cond_move(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11154 gen_ldxs(ctx
, rs
, rt
, rd
);
11157 goto pool32a_invalid
;
11161 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
11164 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
11167 gen_pool32axf(env
, ctx
, rt
, rs
, is_branch
);
11170 generate_exception(ctx
, EXCP_BREAK
);
11174 MIPS_INVAL("pool32a");
11175 generate_exception(ctx
, EXCP_RI
);
11180 minor
= (ctx
->opcode
>> 12) & 0xf;
11183 check_cp0_enabled(ctx
);
11184 /* Treat as no-op. */
11188 /* COP2: Not implemented. */
11189 generate_exception_err(ctx
, EXCP_CpU
, 2);
11193 #ifdef TARGET_MIPS64
11197 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11201 #ifdef TARGET_MIPS64
11205 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11208 MIPS_INVAL("pool32b");
11209 generate_exception(ctx
, EXCP_RI
);
11214 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
11215 minor
= ctx
->opcode
& 0x3f;
11216 check_cp1_enabled(ctx
);
11219 mips32_op
= OPC_ALNV_PS
;
11222 mips32_op
= OPC_MADD_S
;
11225 mips32_op
= OPC_MADD_D
;
11228 mips32_op
= OPC_MADD_PS
;
11231 mips32_op
= OPC_MSUB_S
;
11234 mips32_op
= OPC_MSUB_D
;
11237 mips32_op
= OPC_MSUB_PS
;
11240 mips32_op
= OPC_NMADD_S
;
11243 mips32_op
= OPC_NMADD_D
;
11246 mips32_op
= OPC_NMADD_PS
;
11249 mips32_op
= OPC_NMSUB_S
;
11252 mips32_op
= OPC_NMSUB_D
;
11255 mips32_op
= OPC_NMSUB_PS
;
11257 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
11259 case CABS_COND_FMT
:
11260 cond
= (ctx
->opcode
>> 6) & 0xf;
11261 cc
= (ctx
->opcode
>> 13) & 0x7;
11262 fmt
= (ctx
->opcode
>> 10) & 0x3;
11265 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
11268 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
11271 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
11274 goto pool32f_invalid
;
11278 cond
= (ctx
->opcode
>> 6) & 0xf;
11279 cc
= (ctx
->opcode
>> 13) & 0x7;
11280 fmt
= (ctx
->opcode
>> 10) & 0x3;
11283 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
11286 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
11289 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
11292 goto pool32f_invalid
;
11296 gen_pool32fxf(env
, ctx
, rt
, rs
);
11300 switch ((ctx
->opcode
>> 6) & 0x7) {
11302 mips32_op
= OPC_PLL_PS
;
11305 mips32_op
= OPC_PLU_PS
;
11308 mips32_op
= OPC_PUL_PS
;
11311 mips32_op
= OPC_PUU_PS
;
11314 mips32_op
= OPC_CVT_PS_S
;
11316 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11319 goto pool32f_invalid
;
11324 switch ((ctx
->opcode
>> 6) & 0x7) {
11326 mips32_op
= OPC_LWXC1
;
11329 mips32_op
= OPC_SWXC1
;
11332 mips32_op
= OPC_LDXC1
;
11335 mips32_op
= OPC_SDXC1
;
11338 mips32_op
= OPC_LUXC1
;
11341 mips32_op
= OPC_SUXC1
;
11343 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
11346 goto pool32f_invalid
;
11351 fmt
= (ctx
->opcode
>> 9) & 0x3;
11352 switch ((ctx
->opcode
>> 6) & 0x7) {
11356 mips32_op
= OPC_RSQRT2_S
;
11359 mips32_op
= OPC_RSQRT2_D
;
11362 mips32_op
= OPC_RSQRT2_PS
;
11365 goto pool32f_invalid
;
11371 mips32_op
= OPC_RECIP2_S
;
11374 mips32_op
= OPC_RECIP2_D
;
11377 mips32_op
= OPC_RECIP2_PS
;
11380 goto pool32f_invalid
;
11384 mips32_op
= OPC_ADDR_PS
;
11387 mips32_op
= OPC_MULR_PS
;
11389 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11392 goto pool32f_invalid
;
11396 /* MOV[FT].fmt and PREFX */
11397 cc
= (ctx
->opcode
>> 13) & 0x7;
11398 fmt
= (ctx
->opcode
>> 9) & 0x3;
11399 switch ((ctx
->opcode
>> 6) & 0x7) {
11403 gen_movcf_s(rs
, rt
, cc
, 0);
11406 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
11409 gen_movcf_ps(rs
, rt
, cc
, 0);
11412 goto pool32f_invalid
;
11418 gen_movcf_s(rs
, rt
, cc
, 1);
11421 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
11424 gen_movcf_ps(rs
, rt
, cc
, 1);
11427 goto pool32f_invalid
;
11433 goto pool32f_invalid
;
11436 #define FINSN_3ARG_SDPS(prfx) \
11437 switch ((ctx->opcode >> 8) & 0x3) { \
11439 mips32_op = OPC_##prfx##_S; \
11442 mips32_op = OPC_##prfx##_D; \
11444 case FMT_SDPS_PS: \
11445 mips32_op = OPC_##prfx##_PS; \
11448 goto pool32f_invalid; \
11451 /* regular FP ops */
11452 switch ((ctx
->opcode
>> 6) & 0x3) {
11454 FINSN_3ARG_SDPS(ADD
);
11457 FINSN_3ARG_SDPS(SUB
);
11460 FINSN_3ARG_SDPS(MUL
);
11463 fmt
= (ctx
->opcode
>> 8) & 0x3;
11465 mips32_op
= OPC_DIV_D
;
11466 } else if (fmt
== 0) {
11467 mips32_op
= OPC_DIV_S
;
11469 goto pool32f_invalid
;
11473 goto pool32f_invalid
;
11478 switch ((ctx
->opcode
>> 6) & 0x3) {
11480 FINSN_3ARG_SDPS(MOVN
);
11483 FINSN_3ARG_SDPS(MOVZ
);
11486 goto pool32f_invalid
;
11490 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11494 MIPS_INVAL("pool32f");
11495 generate_exception(ctx
, EXCP_RI
);
11499 generate_exception_err(ctx
, EXCP_CpU
, 1);
11503 minor
= (ctx
->opcode
>> 21) & 0x1f;
11506 mips32_op
= OPC_BLTZ
;
11509 mips32_op
= OPC_BLTZAL
;
11512 mips32_op
= OPC_BLTZALS
;
11515 mips32_op
= OPC_BGEZ
;
11518 mips32_op
= OPC_BGEZAL
;
11521 mips32_op
= OPC_BGEZALS
;
11524 mips32_op
= OPC_BLEZ
;
11527 mips32_op
= OPC_BGTZ
;
11529 gen_compute_branch(ctx
, mips32_op
, 4, rs
, -1, imm
<< 1);
11535 mips32_op
= OPC_TLTI
;
11538 mips32_op
= OPC_TGEI
;
11541 mips32_op
= OPC_TLTIU
;
11544 mips32_op
= OPC_TGEIU
;
11547 mips32_op
= OPC_TNEI
;
11550 mips32_op
= OPC_TEQI
;
11552 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
11557 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
11558 4, rs
, 0, imm
<< 1);
11559 /* Compact branches don't have a delay slot, so just let
11560 the normal delay slot handling take us to the branch
11564 gen_logic_imm(env
, ctx
, OPC_LUI
, rs
, -1, imm
);
11570 /* COP2: Not implemented. */
11571 generate_exception_err(ctx
, EXCP_CpU
, 2);
11574 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
11577 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
11580 mips32_op
= OPC_BC1FANY4
;
11583 mips32_op
= OPC_BC1TANY4
;
11586 check_insn(env
, ctx
, ASE_MIPS3D
);
11589 gen_compute_branch1(env
, ctx
, mips32_op
,
11590 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
11595 /* MIPS DSP: not implemented */
11598 MIPS_INVAL("pool32i");
11599 generate_exception(ctx
, EXCP_RI
);
11604 minor
= (ctx
->opcode
>> 12) & 0xf;
11607 mips32_op
= OPC_LWL
;
11610 mips32_op
= OPC_SWL
;
11613 mips32_op
= OPC_LWR
;
11616 mips32_op
= OPC_SWR
;
11618 #if defined(TARGET_MIPS64)
11620 mips32_op
= OPC_LDL
;
11623 mips32_op
= OPC_SDL
;
11626 mips32_op
= OPC_LDR
;
11629 mips32_op
= OPC_SDR
;
11632 mips32_op
= OPC_LWU
;
11635 mips32_op
= OPC_LLD
;
11639 mips32_op
= OPC_LL
;
11642 gen_ld(env
, ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11645 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11648 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11650 #if defined(TARGET_MIPS64)
11652 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11656 /* Treat as no-op */
11659 MIPS_INVAL("pool32c");
11660 generate_exception(ctx
, EXCP_RI
);
11665 mips32_op
= OPC_ADDI
;
11668 mips32_op
= OPC_ADDIU
;
11670 gen_arith_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11673 /* Logical operations */
11675 mips32_op
= OPC_ORI
;
11678 mips32_op
= OPC_XORI
;
11681 mips32_op
= OPC_ANDI
;
11683 gen_logic_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11686 /* Set less than immediate */
11688 mips32_op
= OPC_SLTI
;
11691 mips32_op
= OPC_SLTIU
;
11693 gen_slt_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11696 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
11697 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
);
11701 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
11702 gen_compute_branch(ctx
, OPC_JALS
, 4, rt
, rs
, offset
);
11706 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1);
11710 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1);
11714 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
11715 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
11719 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
11720 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
11723 /* Floating point (COP1) */
11725 mips32_op
= OPC_LWC1
;
11728 mips32_op
= OPC_LDC1
;
11731 mips32_op
= OPC_SWC1
;
11734 mips32_op
= OPC_SDC1
;
11736 gen_cop1_ldst(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11740 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
11741 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
11743 gen_addiupc(ctx
, reg
, offset
, 0, 0);
11746 /* Loads and stores */
11748 mips32_op
= OPC_LB
;
11751 mips32_op
= OPC_LBU
;
11754 mips32_op
= OPC_LH
;
11757 mips32_op
= OPC_LHU
;
11760 mips32_op
= OPC_LW
;
11762 #ifdef TARGET_MIPS64
11764 mips32_op
= OPC_LD
;
11767 mips32_op
= OPC_SD
;
11771 mips32_op
= OPC_SB
;
11774 mips32_op
= OPC_SH
;
11777 mips32_op
= OPC_SW
;
11780 gen_ld(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11783 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
11786 generate_exception(ctx
, EXCP_RI
);
11791 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
, int *is_branch
)
11795 /* make sure instructions are on a halfword boundary */
11796 if (ctx
->pc
& 0x1) {
11797 env
->CP0_BadVAddr
= ctx
->pc
;
11798 generate_exception(ctx
, EXCP_AdEL
);
11799 ctx
->bstate
= BS_STOP
;
11803 op
= (ctx
->opcode
>> 10) & 0x3f;
11804 /* Enforce properly-sized instructions in a delay slot */
11805 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11806 int bits
= ctx
->hflags
& MIPS_HFLAG_BMASK_EXT
;
11840 case POOL48A
: /* ??? */
11845 if (bits
& MIPS_HFLAG_BDS16
) {
11846 generate_exception(ctx
, EXCP_RI
);
11847 /* Just stop translation; the user is confused. */
11848 ctx
->bstate
= BS_STOP
;
11873 if (bits
& MIPS_HFLAG_BDS32
) {
11874 generate_exception(ctx
, EXCP_RI
);
11875 /* Just stop translation; the user is confused. */
11876 ctx
->bstate
= BS_STOP
;
11887 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11888 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
11889 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
11892 switch (ctx
->opcode
& 0x1) {
11901 gen_arith(env
, ctx
, opc
, rd
, rs1
, rs2
);
11906 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11907 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
11908 int amount
= (ctx
->opcode
>> 1) & 0x7;
11910 amount
= amount
== 0 ? 8 : amount
;
11912 switch (ctx
->opcode
& 0x1) {
11921 gen_shift_imm(env
, ctx
, opc
, rd
, rs
, amount
);
11925 gen_pool16c_insn(env
, ctx
, is_branch
);
11929 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11930 int rb
= 28; /* GP */
11931 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
11933 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11937 if (ctx
->opcode
& 1) {
11938 generate_exception(ctx
, EXCP_RI
);
11941 int enc_dest
= uMIPS_RD(ctx
->opcode
);
11942 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
11943 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
11944 int rd
, rs
, re
, rt
;
11945 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
11946 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
11947 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
11949 rd
= rd_enc
[enc_dest
];
11950 re
= re_enc
[enc_dest
];
11951 rs
= rs_rt_enc
[enc_rs
];
11952 rt
= rs_rt_enc
[enc_rt
];
11954 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, 0);
11955 gen_arith_imm(env
, ctx
, OPC_ADDIU
, re
, rt
, 0);
11960 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11961 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11962 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
11963 offset
= (offset
== 0xf ? -1 : offset
);
11965 gen_ld(env
, ctx
, OPC_LBU
, rd
, rb
, offset
);
11970 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11971 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11972 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
11974 gen_ld(env
, ctx
, OPC_LHU
, rd
, rb
, offset
);
11979 int rd
= (ctx
->opcode
>> 5) & 0x1f;
11980 int rb
= 29; /* SP */
11981 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
11983 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11988 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11989 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11990 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
11992 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11997 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
11998 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11999 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
12001 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
12006 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
12007 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
12008 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
12010 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
12015 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12016 int rb
= 29; /* SP */
12017 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
12019 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
12024 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
12025 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
12026 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
12028 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
12033 int rd
= uMIPS_RD5(ctx
->opcode
);
12034 int rs
= uMIPS_RS5(ctx
->opcode
);
12036 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, 0);
12040 gen_andi16(env
, ctx
);
12043 switch (ctx
->opcode
& 0x1) {
12045 gen_addius5(env
, ctx
);
12048 gen_addiusp(env
, ctx
);
12053 switch (ctx
->opcode
& 0x1) {
12055 gen_addiur2(env
, ctx
);
12058 gen_addiur1sp(env
, ctx
);
12063 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
12064 SIMM(ctx
->opcode
, 0, 10) << 1);
12069 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
12070 mmreg(uMIPS_RD(ctx
->opcode
)),
12071 0, SIMM(ctx
->opcode
, 0, 7) << 1);
12076 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
12077 int imm
= ZIMM(ctx
->opcode
, 0, 7);
12079 imm
= (imm
== 0x7f ? -1 : imm
);
12080 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
12090 generate_exception(ctx
, EXCP_RI
);
12093 decode_micromips32_opc (env
, ctx
, op
, is_branch
);
12100 /* SmartMIPS extension to MIPS32 */
12102 #if defined(TARGET_MIPS64)
12104 /* MDMX extension to MIPS64 */
12108 static void decode_opc (CPUMIPSState
*env
, DisasContext
*ctx
, int *is_branch
)
12111 int rs
, rt
, rd
, sa
;
12112 uint32_t op
, op1
, op2
;
12115 /* make sure instructions are on a word boundary */
12116 if (ctx
->pc
& 0x3) {
12117 env
->CP0_BadVAddr
= ctx
->pc
;
12118 generate_exception(ctx
, EXCP_AdEL
);
12122 /* Handle blikely not taken case */
12123 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
12124 int l1
= gen_new_label();
12126 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx
")", ctx
->pc
+ 4);
12127 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12128 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
12129 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
12133 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
12134 tcg_gen_debug_insn_start(ctx
->pc
);
12137 op
= MASK_OP_MAJOR(ctx
->opcode
);
12138 rs
= (ctx
->opcode
>> 21) & 0x1f;
12139 rt
= (ctx
->opcode
>> 16) & 0x1f;
12140 rd
= (ctx
->opcode
>> 11) & 0x1f;
12141 sa
= (ctx
->opcode
>> 6) & 0x1f;
12142 imm
= (int16_t)ctx
->opcode
;
12145 op1
= MASK_SPECIAL(ctx
->opcode
);
12147 case OPC_SLL
: /* Shift with immediate */
12149 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12152 switch ((ctx
->opcode
>> 21) & 0x1f) {
12154 /* rotr is decoded as srl on non-R2 CPUs */
12155 if (env
->insn_flags
& ISA_MIPS32R2
) {
12160 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12163 generate_exception(ctx
, EXCP_RI
);
12167 case OPC_MOVN
: /* Conditional move */
12169 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
|
12170 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
12171 gen_cond_move(env
, ctx
, op1
, rd
, rs
, rt
);
12173 case OPC_ADD
... OPC_SUBU
:
12174 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12176 case OPC_SLLV
: /* Shifts */
12178 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12181 switch ((ctx
->opcode
>> 6) & 0x1f) {
12183 /* rotrv is decoded as srlv on non-R2 CPUs */
12184 if (env
->insn_flags
& ISA_MIPS32R2
) {
12189 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12192 generate_exception(ctx
, EXCP_RI
);
12196 case OPC_SLT
: /* Set on less than */
12198 gen_slt(env
, ctx
, op1
, rd
, rs
, rt
);
12200 case OPC_AND
: /* Logic*/
12204 gen_logic(env
, ctx
, op1
, rd
, rs
, rt
);
12206 case OPC_MULT
... OPC_DIVU
:
12208 check_insn(env
, ctx
, INSN_VR54XX
);
12209 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
12210 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
12212 gen_muldiv(ctx
, op1
, rs
, rt
);
12214 case OPC_JR
... OPC_JALR
:
12215 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
);
12218 case OPC_TGE
... OPC_TEQ
: /* Traps */
12220 gen_trap(ctx
, op1
, rs
, rt
, -1);
12222 case OPC_MFHI
: /* Move from HI/LO */
12224 gen_HILO(ctx
, op1
, rd
);
12227 case OPC_MTLO
: /* Move to HI/LO */
12228 gen_HILO(ctx
, op1
, rs
);
12230 case OPC_PMON
: /* Pmon entry point, also R4010 selsl */
12231 #ifdef MIPS_STRICT_STANDARD
12232 MIPS_INVAL("PMON / selsl");
12233 generate_exception(ctx
, EXCP_RI
);
12235 gen_helper_0e0i(pmon
, sa
);
12239 generate_exception(ctx
, EXCP_SYSCALL
);
12240 ctx
->bstate
= BS_STOP
;
12243 generate_exception(ctx
, EXCP_BREAK
);
12246 #ifdef MIPS_STRICT_STANDARD
12247 MIPS_INVAL("SPIM");
12248 generate_exception(ctx
, EXCP_RI
);
12250 /* Implemented as RI exception for now. */
12251 MIPS_INVAL("spim (unofficial)");
12252 generate_exception(ctx
, EXCP_RI
);
12256 /* Treat as NOP. */
12260 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
12261 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12262 check_cp1_enabled(ctx
);
12263 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
12264 (ctx
->opcode
>> 16) & 1);
12266 generate_exception_err(ctx
, EXCP_CpU
, 1);
12270 #if defined(TARGET_MIPS64)
12271 /* MIPS64 specific opcodes */
12276 check_insn(env
, ctx
, ISA_MIPS3
);
12277 check_mips_64(ctx
);
12278 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12281 switch ((ctx
->opcode
>> 21) & 0x1f) {
12283 /* drotr is decoded as dsrl on non-R2 CPUs */
12284 if (env
->insn_flags
& ISA_MIPS32R2
) {
12289 check_insn(env
, ctx
, ISA_MIPS3
);
12290 check_mips_64(ctx
);
12291 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12294 generate_exception(ctx
, EXCP_RI
);
12299 switch ((ctx
->opcode
>> 21) & 0x1f) {
12301 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
12302 if (env
->insn_flags
& ISA_MIPS32R2
) {
12307 check_insn(env
, ctx
, ISA_MIPS3
);
12308 check_mips_64(ctx
);
12309 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12312 generate_exception(ctx
, EXCP_RI
);
12316 case OPC_DADD
... OPC_DSUBU
:
12317 check_insn(env
, ctx
, ISA_MIPS3
);
12318 check_mips_64(ctx
);
12319 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12323 check_insn(env
, ctx
, ISA_MIPS3
);
12324 check_mips_64(ctx
);
12325 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12328 switch ((ctx
->opcode
>> 6) & 0x1f) {
12330 /* drotrv is decoded as dsrlv on non-R2 CPUs */
12331 if (env
->insn_flags
& ISA_MIPS32R2
) {
12336 check_insn(env
, ctx
, ISA_MIPS3
);
12337 check_mips_64(ctx
);
12338 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12341 generate_exception(ctx
, EXCP_RI
);
12345 case OPC_DMULT
... OPC_DDIVU
:
12346 check_insn(env
, ctx
, ISA_MIPS3
);
12347 check_mips_64(ctx
);
12348 gen_muldiv(ctx
, op1
, rs
, rt
);
12351 default: /* Invalid */
12352 MIPS_INVAL("special");
12353 generate_exception(ctx
, EXCP_RI
);
12358 op1
= MASK_SPECIAL2(ctx
->opcode
);
12360 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
12361 case OPC_MSUB
... OPC_MSUBU
:
12362 check_insn(env
, ctx
, ISA_MIPS32
);
12363 gen_muldiv(ctx
, op1
, rs
, rt
);
12366 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12370 check_insn(env
, ctx
, ISA_MIPS32
);
12371 gen_cl(ctx
, op1
, rd
, rs
);
12374 /* XXX: not clear which exception should be raised
12375 * when in debug mode...
12377 check_insn(env
, ctx
, ISA_MIPS32
);
12378 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
12379 generate_exception(ctx
, EXCP_DBp
);
12381 generate_exception(ctx
, EXCP_DBp
);
12383 /* Treat as NOP. */
12386 case OPC_DIVU_G_2F
:
12387 case OPC_MULT_G_2F
:
12388 case OPC_MULTU_G_2F
:
12390 case OPC_MODU_G_2F
:
12391 check_insn(env
, ctx
, INSN_LOONGSON2F
);
12392 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12394 #if defined(TARGET_MIPS64)
12397 check_insn(env
, ctx
, ISA_MIPS64
);
12398 check_mips_64(ctx
);
12399 gen_cl(ctx
, op1
, rd
, rs
);
12401 case OPC_DMULT_G_2F
:
12402 case OPC_DMULTU_G_2F
:
12403 case OPC_DDIV_G_2F
:
12404 case OPC_DDIVU_G_2F
:
12405 case OPC_DMOD_G_2F
:
12406 case OPC_DMODU_G_2F
:
12407 check_insn(env
, ctx
, INSN_LOONGSON2F
);
12408 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12411 default: /* Invalid */
12412 MIPS_INVAL("special2");
12413 generate_exception(ctx
, EXCP_RI
);
12418 op1
= MASK_SPECIAL3(ctx
->opcode
);
12422 check_insn(env
, ctx
, ISA_MIPS32R2
);
12423 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
12426 check_insn(env
, ctx
, ISA_MIPS32R2
);
12427 op2
= MASK_BSHFL(ctx
->opcode
);
12428 gen_bshfl(ctx
, op2
, rt
, rd
);
12431 gen_rdhwr(env
, ctx
, rt
, rd
);
12434 check_insn(env
, ctx
, ASE_MT
);
12436 TCGv t0
= tcg_temp_new();
12437 TCGv t1
= tcg_temp_new();
12439 gen_load_gpr(t0
, rt
);
12440 gen_load_gpr(t1
, rs
);
12441 gen_helper_fork(t0
, t1
);
12447 check_insn(env
, ctx
, ASE_MT
);
12449 TCGv t0
= tcg_temp_new();
12451 save_cpu_state(ctx
, 1);
12452 gen_load_gpr(t0
, rs
);
12453 gen_helper_yield(t0
, cpu_env
, t0
);
12454 gen_store_gpr(t0
, rd
);
12458 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
12459 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
12460 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
12461 check_insn(env
, ctx
, INSN_LOONGSON2E
);
12462 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12464 #if defined(TARGET_MIPS64)
12465 case OPC_DEXTM
... OPC_DEXT
:
12466 case OPC_DINSM
... OPC_DINS
:
12467 check_insn(env
, ctx
, ISA_MIPS64R2
);
12468 check_mips_64(ctx
);
12469 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
12472 check_insn(env
, ctx
, ISA_MIPS64R2
);
12473 check_mips_64(ctx
);
12474 op2
= MASK_DBSHFL(ctx
->opcode
);
12475 gen_bshfl(ctx
, op2
, rt
, rd
);
12477 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
12478 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
12479 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
12480 check_insn(env
, ctx
, INSN_LOONGSON2E
);
12481 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12484 default: /* Invalid */
12485 MIPS_INVAL("special3");
12486 generate_exception(ctx
, EXCP_RI
);
12491 op1
= MASK_REGIMM(ctx
->opcode
);
12493 case OPC_BLTZ
... OPC_BGEZL
: /* REGIMM branches */
12494 case OPC_BLTZAL
... OPC_BGEZALL
:
12495 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2);
12498 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
12500 gen_trap(ctx
, op1
, rs
, -1, imm
);
12503 check_insn(env
, ctx
, ISA_MIPS32R2
);
12504 /* Treat as NOP. */
12506 default: /* Invalid */
12507 MIPS_INVAL("regimm");
12508 generate_exception(ctx
, EXCP_RI
);
12513 check_cp0_enabled(ctx
);
12514 op1
= MASK_CP0(ctx
->opcode
);
12520 #if defined(TARGET_MIPS64)
12524 #ifndef CONFIG_USER_ONLY
12525 gen_cp0(env
, ctx
, op1
, rt
, rd
);
12526 #endif /* !CONFIG_USER_ONLY */
12528 case OPC_C0_FIRST
... OPC_C0_LAST
:
12529 #ifndef CONFIG_USER_ONLY
12530 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
12531 #endif /* !CONFIG_USER_ONLY */
12534 #ifndef CONFIG_USER_ONLY
12536 TCGv t0
= tcg_temp_new();
12538 op2
= MASK_MFMC0(ctx
->opcode
);
12541 check_insn(env
, ctx
, ASE_MT
);
12542 gen_helper_dmt(t0
);
12543 gen_store_gpr(t0
, rt
);
12546 check_insn(env
, ctx
, ASE_MT
);
12547 gen_helper_emt(t0
);
12548 gen_store_gpr(t0
, rt
);
12551 check_insn(env
, ctx
, ASE_MT
);
12552 gen_helper_dvpe(t0
, cpu_env
);
12553 gen_store_gpr(t0
, rt
);
12556 check_insn(env
, ctx
, ASE_MT
);
12557 gen_helper_evpe(t0
, cpu_env
);
12558 gen_store_gpr(t0
, rt
);
12561 check_insn(env
, ctx
, ISA_MIPS32R2
);
12562 save_cpu_state(ctx
, 1);
12563 gen_helper_di(t0
, cpu_env
);
12564 gen_store_gpr(t0
, rt
);
12565 /* Stop translation as we may have switched the execution mode */
12566 ctx
->bstate
= BS_STOP
;
12569 check_insn(env
, ctx
, ISA_MIPS32R2
);
12570 save_cpu_state(ctx
, 1);
12571 gen_helper_ei(t0
, cpu_env
);
12572 gen_store_gpr(t0
, rt
);
12573 /* Stop translation as we may have switched the execution mode */
12574 ctx
->bstate
= BS_STOP
;
12576 default: /* Invalid */
12577 MIPS_INVAL("mfmc0");
12578 generate_exception(ctx
, EXCP_RI
);
12583 #endif /* !CONFIG_USER_ONLY */
12586 check_insn(env
, ctx
, ISA_MIPS32R2
);
12587 gen_load_srsgpr(rt
, rd
);
12590 check_insn(env
, ctx
, ISA_MIPS32R2
);
12591 gen_store_srsgpr(rt
, rd
);
12595 generate_exception(ctx
, EXCP_RI
);
12599 case OPC_ADDI
: /* Arithmetic with immediate opcode */
12601 gen_arith_imm(env
, ctx
, op
, rt
, rs
, imm
);
12603 case OPC_SLTI
: /* Set on less than with immediate opcode */
12605 gen_slt_imm(env
, ctx
, op
, rt
, rs
, imm
);
12607 case OPC_ANDI
: /* Arithmetic with immediate opcode */
12611 gen_logic_imm(env
, ctx
, op
, rt
, rs
, imm
);
12613 case OPC_J
... OPC_JAL
: /* Jump */
12614 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
12615 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
);
12618 case OPC_BEQ
... OPC_BGTZ
: /* Branch */
12619 case OPC_BEQL
... OPC_BGTZL
:
12620 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2);
12623 case OPC_LB
... OPC_LWR
: /* Load and stores */
12625 gen_ld(env
, ctx
, op
, rt
, rs
, imm
);
12627 case OPC_SB
... OPC_SW
:
12629 gen_st(ctx
, op
, rt
, rs
, imm
);
12632 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
12635 check_cp0_enabled(ctx
);
12636 check_insn(env
, ctx
, ISA_MIPS3
| ISA_MIPS32
);
12637 /* Treat as NOP. */
12640 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
12641 /* Treat as NOP. */
12644 /* Floating point (COP1). */
12649 gen_cop1_ldst(env
, ctx
, op
, rt
, rs
, imm
);
12653 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12654 check_cp1_enabled(ctx
);
12655 op1
= MASK_CP1(ctx
->opcode
);
12659 check_insn(env
, ctx
, ISA_MIPS32R2
);
12664 gen_cp1(ctx
, op1
, rt
, rd
);
12666 #if defined(TARGET_MIPS64)
12669 check_insn(env
, ctx
, ISA_MIPS3
);
12670 gen_cp1(ctx
, op1
, rt
, rd
);
12676 check_insn(env
, ctx
, ASE_MIPS3D
);
12679 gen_compute_branch1(env
, ctx
, MASK_BC1(ctx
->opcode
),
12680 (rt
>> 2) & 0x7, imm
<< 2);
12688 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
12693 generate_exception (ctx
, EXCP_RI
);
12697 generate_exception_err(ctx
, EXCP_CpU
, 1);
12706 /* COP2: Not implemented. */
12707 generate_exception_err(ctx
, EXCP_CpU
, 2);
12710 check_insn(env
, ctx
, INSN_LOONGSON2F
);
12711 /* Note that these instructions use different fields. */
12712 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
12716 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12717 check_cp1_enabled(ctx
);
12718 op1
= MASK_CP3(ctx
->opcode
);
12726 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
12729 /* Treat as NOP. */
12744 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
12748 generate_exception (ctx
, EXCP_RI
);
12752 generate_exception_err(ctx
, EXCP_CpU
, 1);
12756 #if defined(TARGET_MIPS64)
12757 /* MIPS64 opcodes */
12759 case OPC_LDL
... OPC_LDR
:
12762 check_insn(env
, ctx
, ISA_MIPS3
);
12763 check_mips_64(ctx
);
12764 gen_ld(env
, ctx
, op
, rt
, rs
, imm
);
12766 case OPC_SDL
... OPC_SDR
:
12768 check_insn(env
, ctx
, ISA_MIPS3
);
12769 check_mips_64(ctx
);
12770 gen_st(ctx
, op
, rt
, rs
, imm
);
12773 check_insn(env
, ctx
, ISA_MIPS3
);
12774 check_mips_64(ctx
);
12775 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
12779 check_insn(env
, ctx
, ISA_MIPS3
);
12780 check_mips_64(ctx
);
12781 gen_arith_imm(env
, ctx
, op
, rt
, rs
, imm
);
12785 check_insn(env
, ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
12786 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
12787 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
);
12791 check_insn(env
, ctx
, ASE_MDMX
);
12792 /* MDMX: Not implemented. */
12793 default: /* Invalid */
12794 MIPS_INVAL("major opcode");
12795 generate_exception(ctx
, EXCP_RI
);
12801 gen_intermediate_code_internal (CPUMIPSState
*env
, TranslationBlock
*tb
,
12805 target_ulong pc_start
;
12806 uint16_t *gen_opc_end
;
12815 qemu_log("search pc %d\n", search_pc
);
12818 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
12821 ctx
.singlestep_enabled
= env
->singlestep_enabled
;
12823 ctx
.bstate
= BS_NONE
;
12824 /* Restore delay slot state from the tb context. */
12825 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
12826 restore_cpu_state(env
, &ctx
);
12827 #ifdef CONFIG_USER_ONLY
12828 ctx
.mem_idx
= MIPS_HFLAG_UM
;
12830 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
12833 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
12834 if (max_insns
== 0)
12835 max_insns
= CF_COUNT_MASK
;
12836 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
12837 gen_icount_start();
12838 while (ctx
.bstate
== BS_NONE
) {
12839 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
12840 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
12841 if (bp
->pc
== ctx
.pc
) {
12842 save_cpu_state(&ctx
, 1);
12843 ctx
.bstate
= BS_BRANCH
;
12844 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
12845 /* Include the breakpoint location or the tb won't
12846 * be flushed when it must be. */
12848 goto done_generating
;
12854 j
= gen_opc_ptr
- gen_opc_buf
;
12858 gen_opc_instr_start
[lj
++] = 0;
12860 gen_opc_pc
[lj
] = ctx
.pc
;
12861 gen_opc_hflags
[lj
] = ctx
.hflags
& MIPS_HFLAG_BMASK
;
12862 gen_opc_instr_start
[lj
] = 1;
12863 gen_opc_icount
[lj
] = num_insns
;
12865 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
12869 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
12870 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
12872 decode_opc(env
, &ctx
, &is_branch
);
12873 } else if (env
->insn_flags
& ASE_MICROMIPS
) {
12874 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
12875 insn_bytes
= decode_micromips_opc(env
, &ctx
, &is_branch
);
12876 } else if (env
->insn_flags
& ASE_MIPS16
) {
12877 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
12878 insn_bytes
= decode_mips16_opc(env
, &ctx
, &is_branch
);
12880 generate_exception(&ctx
, EXCP_RI
);
12881 ctx
.bstate
= BS_STOP
;
12885 handle_delay_slot(env
, &ctx
, insn_bytes
);
12887 ctx
.pc
+= insn_bytes
;
12891 /* Execute a branch and its delay slot as a single instruction.
12892 This is what GDB expects and is consistent with what the
12893 hardware does (e.g. if a delay slot instruction faults, the
12894 reported PC is the PC of the branch). */
12895 if (env
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0)
12898 if ((ctx
.pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
12901 if (gen_opc_ptr
>= gen_opc_end
)
12904 if (num_insns
>= max_insns
)
12910 if (tb
->cflags
& CF_LAST_IO
)
12912 if (env
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
12913 save_cpu_state(&ctx
, ctx
.bstate
== BS_NONE
);
12914 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
12916 switch (ctx
.bstate
) {
12918 gen_goto_tb(&ctx
, 0, ctx
.pc
);
12921 save_cpu_state(&ctx
, 0);
12922 gen_goto_tb(&ctx
, 0, ctx
.pc
);
12925 tcg_gen_exit_tb(0);
12933 gen_icount_end(tb
, num_insns
);
12934 *gen_opc_ptr
= INDEX_op_end
;
12936 j
= gen_opc_ptr
- gen_opc_buf
;
12939 gen_opc_instr_start
[lj
++] = 0;
12941 tb
->size
= ctx
.pc
- pc_start
;
12942 tb
->icount
= num_insns
;
12946 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
12947 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
12948 log_target_disas(pc_start
, ctx
.pc
- pc_start
, 0);
12954 void gen_intermediate_code (CPUMIPSState
*env
, struct TranslationBlock
*tb
)
12956 gen_intermediate_code_internal(env
, tb
, 0);
12959 void gen_intermediate_code_pc (CPUMIPSState
*env
, struct TranslationBlock
*tb
)
12961 gen_intermediate_code_internal(env
, tb
, 1);
12964 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
12968 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
12970 #define printfpr(fp) \
12973 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
12974 " fd:%13g fs:%13g psu: %13g\n", \
12975 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
12976 (double)(fp)->fd, \
12977 (double)(fp)->fs[FP_ENDIAN_IDX], \
12978 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
12981 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
12982 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
12983 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
12984 " fd:%13g fs:%13g psu:%13g\n", \
12985 tmp.w[FP_ENDIAN_IDX], tmp.d, \
12987 (double)tmp.fs[FP_ENDIAN_IDX], \
12988 (double)tmp.fs[!FP_ENDIAN_IDX]); \
12993 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
12994 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
12995 get_float_exception_flags(&env
->active_fpu
.fp_status
));
12996 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
12997 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
12998 printfpr(&env
->active_fpu
.fpr
[i
]);
13004 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
13005 /* Debug help: The architecture requires 32bit code to maintain proper
13006 sign-extended values on 64bit machines. */
13008 #define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
13011 cpu_mips_check_sign_extensions (CPUMIPSState
*env
, FILE *f
,
13012 fprintf_function cpu_fprintf
,
13017 if (!SIGN_EXT_P(env
->active_tc
.PC
))
13018 cpu_fprintf(f
, "BROKEN: pc=0x" TARGET_FMT_lx
"\n", env
->active_tc
.PC
);
13019 if (!SIGN_EXT_P(env
->active_tc
.HI
[0]))
13020 cpu_fprintf(f
, "BROKEN: HI=0x" TARGET_FMT_lx
"\n", env
->active_tc
.HI
[0]);
13021 if (!SIGN_EXT_P(env
->active_tc
.LO
[0]))
13022 cpu_fprintf(f
, "BROKEN: LO=0x" TARGET_FMT_lx
"\n", env
->active_tc
.LO
[0]);
13023 if (!SIGN_EXT_P(env
->btarget
))
13024 cpu_fprintf(f
, "BROKEN: btarget=0x" TARGET_FMT_lx
"\n", env
->btarget
);
13026 for (i
= 0; i
< 32; i
++) {
13027 if (!SIGN_EXT_P(env
->active_tc
.gpr
[i
]))
13028 cpu_fprintf(f
, "BROKEN: %s=0x" TARGET_FMT_lx
"\n", regnames
[i
], env
->active_tc
.gpr
[i
]);
13031 if (!SIGN_EXT_P(env
->CP0_EPC
))
13032 cpu_fprintf(f
, "BROKEN: EPC=0x" TARGET_FMT_lx
"\n", env
->CP0_EPC
);
13033 if (!SIGN_EXT_P(env
->lladdr
))
13034 cpu_fprintf(f
, "BROKEN: LLAddr=0x" TARGET_FMT_lx
"\n", env
->lladdr
);
13038 void cpu_dump_state (CPUMIPSState
*env
, FILE *f
, fprintf_function cpu_fprintf
,
13043 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
13044 " LO=0x" TARGET_FMT_lx
" ds %04x "
13045 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
13046 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
13047 env
->hflags
, env
->btarget
, env
->bcond
);
13048 for (i
= 0; i
< 32; i
++) {
13050 cpu_fprintf(f
, "GPR%02d:", i
);
13051 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
13053 cpu_fprintf(f
, "\n");
13056 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
13057 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
13058 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx
"\n",
13059 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
13060 if (env
->hflags
& MIPS_HFLAG_FPU
)
13061 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
13062 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
13063 cpu_mips_check_sign_extensions(env
, f
, cpu_fprintf
, flags
);
13067 static void mips_tcg_init(void)
13072 /* Initialize various static tables. */
13076 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
13077 TCGV_UNUSED(cpu_gpr
[0]);
13078 for (i
= 1; i
< 32; i
++)
13079 cpu_gpr
[i
] = tcg_global_mem_new(TCG_AREG0
,
13080 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
13083 for (i
= 0; i
< 32; i
++) {
13084 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
]);
13085 fpu_f64
[i
] = tcg_global_mem_new_i64(TCG_AREG0
, off
, fregnames
[i
]);
13088 cpu_PC
= tcg_global_mem_new(TCG_AREG0
,
13089 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
13090 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
13091 cpu_HI
[i
] = tcg_global_mem_new(TCG_AREG0
,
13092 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
13094 cpu_LO
[i
] = tcg_global_mem_new(TCG_AREG0
,
13095 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
13097 cpu_ACX
[i
] = tcg_global_mem_new(TCG_AREG0
,
13098 offsetof(CPUMIPSState
, active_tc
.ACX
[i
]),
13101 cpu_dspctrl
= tcg_global_mem_new(TCG_AREG0
,
13102 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
13104 bcond
= tcg_global_mem_new(TCG_AREG0
,
13105 offsetof(CPUMIPSState
, bcond
), "bcond");
13106 btarget
= tcg_global_mem_new(TCG_AREG0
,
13107 offsetof(CPUMIPSState
, btarget
), "btarget");
13108 hflags
= tcg_global_mem_new_i32(TCG_AREG0
,
13109 offsetof(CPUMIPSState
, hflags
), "hflags");
13111 fpu_fcr0
= tcg_global_mem_new_i32(TCG_AREG0
,
13112 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
13114 fpu_fcr31
= tcg_global_mem_new_i32(TCG_AREG0
,
13115 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
13118 /* register helpers */
13119 #define GEN_HELPER 2
13120 #include "helper.h"
13125 #include "translate_init.c"
13127 MIPSCPU
*cpu_mips_init(const char *cpu_model
)
13131 const mips_def_t
*def
;
13133 def
= cpu_mips_find_by_name(cpu_model
);
13136 cpu
= MIPS_CPU(object_new(TYPE_MIPS_CPU
));
13138 env
->cpu_model
= def
;
13139 env
->cpu_model_str
= cpu_model
;
13141 #ifndef CONFIG_USER_ONLY
13142 mmu_init(env
, def
);
13144 fpu_init(env
, def
);
13145 mvp_init(env
, def
);
13147 cpu_reset(CPU(cpu
));
13148 qemu_init_vcpu(env
);
13152 void cpu_state_reset(CPUMIPSState
*env
)
13154 if (qemu_loglevel_mask(CPU_LOG_RESET
)) {
13155 qemu_log("CPU Reset (CPU %d)\n", env
->cpu_index
);
13156 log_cpu_state(env
, 0);
13159 memset(env
, 0, offsetof(CPUMIPSState
, breakpoints
));
13162 /* Reset registers to their default values */
13163 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
13164 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
13165 #ifdef TARGET_WORDS_BIGENDIAN
13166 env
->CP0_Config0
|= (1 << CP0C0_BE
);
13168 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
13169 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
13170 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
13171 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
13172 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
13173 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
13174 << env
->cpu_model
->CP0_LLAddr_shift
;
13175 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
13176 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
13177 env
->CCRes
= env
->cpu_model
->CCRes
;
13178 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
13179 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
13180 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
13181 env
->current_tc
= 0;
13182 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
13183 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
13184 #if defined(TARGET_MIPS64)
13185 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
13186 env
->SEGMask
|= 3ULL << 62;
13189 env
->PABITS
= env
->cpu_model
->PABITS
;
13190 env
->PAMask
= (target_ulong
)((1ULL << env
->cpu_model
->PABITS
) - 1);
13191 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
13192 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
13193 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
13194 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
13195 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
13196 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
13197 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
13198 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
13199 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
13200 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
13201 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
13202 env
->insn_flags
= env
->cpu_model
->insn_flags
;
13204 #if defined(CONFIG_USER_ONLY)
13205 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
13206 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
13207 hardware registers. */
13208 env
->CP0_HWREna
|= 0x0000000F;
13209 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
13210 env
->CP0_Status
|= (1 << CP0St_CU1
);
13213 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
13214 /* If the exception was raised from a delay slot,
13215 come back to the jump. */
13216 env
->CP0_ErrorEPC
= env
->active_tc
.PC
- 4;
13218 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
13220 env
->active_tc
.PC
= (int32_t)0xBFC00000;
13221 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
13222 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
13223 env
->CP0_Wired
= 0;
13224 env
->CP0_EBase
= 0x80000000 | (env
->cpu_index
& 0x3FF);
13225 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
13226 /* vectored interrupts not implemented, timer on int 7,
13227 no performance counters. */
13228 env
->CP0_IntCtl
= 0xe0000000;
13232 for (i
= 0; i
< 7; i
++) {
13233 env
->CP0_WatchLo
[i
] = 0;
13234 env
->CP0_WatchHi
[i
] = 0x80000000;
13236 env
->CP0_WatchLo
[7] = 0;
13237 env
->CP0_WatchHi
[7] = 0;
13239 /* Count register increments in debug mode, EJTAG version 1 */
13240 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
13242 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
13245 /* Only TC0 on VPE 0 starts as active. */
13246 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
13247 env
->tcs
[i
].CP0_TCBind
= env
->cpu_index
<< CP0TCBd_CurVPE
;
13248 env
->tcs
[i
].CP0_TCHalt
= 1;
13250 env
->active_tc
.CP0_TCHalt
= 1;
13253 if (!env
->cpu_index
) {
13254 /* VPE0 starts up enabled. */
13255 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
13256 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
13258 /* TC0 starts up unhalted. */
13260 env
->active_tc
.CP0_TCHalt
= 0;
13261 env
->tcs
[0].CP0_TCHalt
= 0;
13262 /* With thread 0 active. */
13263 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
13264 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
13268 compute_hflags(env
);
13269 env
->exception_index
= EXCP_NONE
;
13272 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
, int pc_pos
)
13274 env
->active_tc
.PC
= gen_opc_pc
[pc_pos
];
13275 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
13276 env
->hflags
|= gen_opc_hflags
[pc_pos
];