2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
9 * This library is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Lesser General Public
11 * License as published by the Free Software Foundation; either
12 * version 2 of the License, or (at your option) any later version.
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Lesser General Public License for more details.
19 * You should have received a copy of the GNU Lesser General Public
20 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
31 #define MIPS_DEBUG_DISAS 0
32 //#define MIPS_DEBUG_SIGN_EXTENSIONS
34 /* MIPS major opcodes */
35 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
38 /* indirect opcode tables */
39 OPC_SPECIAL
= (0x00 << 26),
40 OPC_REGIMM
= (0x01 << 26),
41 OPC_CP0
= (0x10 << 26),
42 OPC_CP1
= (0x11 << 26),
43 OPC_CP2
= (0x12 << 26),
44 OPC_CP3
= (0x13 << 26),
45 OPC_SPECIAL2
= (0x1C << 26),
46 OPC_SPECIAL3
= (0x1F << 26),
47 /* arithmetic with immediate */
48 OPC_ADDI
= (0x08 << 26),
49 OPC_ADDIU
= (0x09 << 26),
50 OPC_SLTI
= (0x0A << 26),
51 OPC_SLTIU
= (0x0B << 26),
52 /* logic with immediate */
53 OPC_ANDI
= (0x0C << 26),
54 OPC_ORI
= (0x0D << 26),
55 OPC_XORI
= (0x0E << 26),
56 OPC_LUI
= (0x0F << 26),
57 /* arithmetic with immediate */
58 OPC_DADDI
= (0x18 << 26),
59 OPC_DADDIU
= (0x19 << 26),
60 /* Jump and branches */
62 OPC_JAL
= (0x03 << 26),
63 OPC_JALS
= OPC_JAL
| 0x5,
64 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
65 OPC_BEQL
= (0x14 << 26),
66 OPC_BNE
= (0x05 << 26),
67 OPC_BNEL
= (0x15 << 26),
68 OPC_BLEZ
= (0x06 << 26),
69 OPC_BLEZL
= (0x16 << 26),
70 OPC_BGTZ
= (0x07 << 26),
71 OPC_BGTZL
= (0x17 << 26),
72 OPC_JALX
= (0x1D << 26), /* MIPS 16 only */
73 OPC_JALXS
= OPC_JALX
| 0x5,
75 OPC_LDL
= (0x1A << 26),
76 OPC_LDR
= (0x1B << 26),
77 OPC_LB
= (0x20 << 26),
78 OPC_LH
= (0x21 << 26),
79 OPC_LWL
= (0x22 << 26),
80 OPC_LW
= (0x23 << 26),
81 OPC_LWPC
= OPC_LW
| 0x5,
82 OPC_LBU
= (0x24 << 26),
83 OPC_LHU
= (0x25 << 26),
84 OPC_LWR
= (0x26 << 26),
85 OPC_LWU
= (0x27 << 26),
86 OPC_SB
= (0x28 << 26),
87 OPC_SH
= (0x29 << 26),
88 OPC_SWL
= (0x2A << 26),
89 OPC_SW
= (0x2B << 26),
90 OPC_SDL
= (0x2C << 26),
91 OPC_SDR
= (0x2D << 26),
92 OPC_SWR
= (0x2E << 26),
93 OPC_LL
= (0x30 << 26),
94 OPC_LLD
= (0x34 << 26),
95 OPC_LD
= (0x37 << 26),
96 OPC_LDPC
= OPC_LD
| 0x5,
97 OPC_SC
= (0x38 << 26),
98 OPC_SCD
= (0x3C << 26),
99 OPC_SD
= (0x3F << 26),
100 /* Floating point load/store */
101 OPC_LWC1
= (0x31 << 26),
102 OPC_LWC2
= (0x32 << 26),
103 OPC_LDC1
= (0x35 << 26),
104 OPC_LDC2
= (0x36 << 26),
105 OPC_SWC1
= (0x39 << 26),
106 OPC_SWC2
= (0x3A << 26),
107 OPC_SDC1
= (0x3D << 26),
108 OPC_SDC2
= (0x3E << 26),
109 /* MDMX ASE specific */
110 OPC_MDMX
= (0x1E << 26),
111 /* Cache and prefetch */
112 OPC_CACHE
= (0x2F << 26),
113 OPC_PREF
= (0x33 << 26),
114 /* Reserved major opcode */
115 OPC_MAJOR3B_RESERVED
= (0x3B << 26),
118 /* MIPS special opcodes */
/* Decode key for SPECIAL-class instructions: major opcode plus the
   function field (bits 5..0).  Parenthesized so `MASK_SPECIAL(x) == OPC_FOO`
   compares the whole mask, not just the low term (| binds looser than ==). */
#define MASK_SPECIAL(op)   (MASK_OP_MAJOR(op) | ((op) & 0x3F))
123 OPC_SLL
= 0x00 | OPC_SPECIAL
,
124 /* NOP is SLL r0, r0, 0 */
125 /* SSNOP is SLL r0, r0, 1 */
126 /* EHB is SLL r0, r0, 3 */
127 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
128 OPC_ROTR
= OPC_SRL
| (1 << 21),
129 OPC_SRA
= 0x03 | OPC_SPECIAL
,
130 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
131 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
132 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
133 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
134 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
135 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
136 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
137 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
138 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
139 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
140 OPC_DROTR
= OPC_DSRL
| (1 << 21),
141 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
142 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
143 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
144 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
145 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
146 /* Multiplication / division */
147 OPC_MULT
= 0x18 | OPC_SPECIAL
,
148 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
149 OPC_DIV
= 0x1A | OPC_SPECIAL
,
150 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
151 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
152 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
153 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
154 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
155 /* 2 registers arithmetic / logic */
156 OPC_ADD
= 0x20 | OPC_SPECIAL
,
157 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
158 OPC_SUB
= 0x22 | OPC_SPECIAL
,
159 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
160 OPC_AND
= 0x24 | OPC_SPECIAL
,
161 OPC_OR
= 0x25 | OPC_SPECIAL
,
162 OPC_XOR
= 0x26 | OPC_SPECIAL
,
163 OPC_NOR
= 0x27 | OPC_SPECIAL
,
164 OPC_SLT
= 0x2A | OPC_SPECIAL
,
165 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
166 OPC_DADD
= 0x2C | OPC_SPECIAL
,
167 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
168 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
169 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
171 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
172 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
173 OPC_JALRC
= OPC_JALR
| (0x5 << 6),
174 OPC_JALRS
= 0x10 | OPC_SPECIAL
| (0x5 << 6),
176 OPC_TGE
= 0x30 | OPC_SPECIAL
,
177 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
178 OPC_TLT
= 0x32 | OPC_SPECIAL
,
179 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
180 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
181 OPC_TNE
= 0x36 | OPC_SPECIAL
,
182 /* HI / LO registers load & stores */
183 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
184 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
185 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
186 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
187 /* Conditional moves */
188 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
189 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
191 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
194 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
195 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
196 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
197 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
198 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
200 OPC_SPECIAL15_RESERVED
= 0x15 | OPC_SPECIAL
,
201 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
202 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
203 OPC_SPECIAL35_RESERVED
= 0x35 | OPC_SPECIAL
,
204 OPC_SPECIAL37_RESERVED
= 0x37 | OPC_SPECIAL
,
205 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
206 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
209 /* Multiplication variants of the vr54xx. */
/* Decode key for VR54xx multiply variants: SPECIAL key plus bits 10..6.
   Fully parenthesized for safe composition in comparisons/switches. */
#define MASK_MUL_VR54XX(op)   (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
213 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
214 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
215 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
216 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
217 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
218 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
219 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
220 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
221 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
222 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
223 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
224 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
225 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
226 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
229 /* REGIMM (rt field) opcodes */
/* Decode key for REGIMM-class instructions: major opcode plus the rt
   field (bits 20..16).  Fully parenthesized for safe composition. */
#define MASK_REGIMM(op)    (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
233 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
234 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
235 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
236 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
237 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
238 OPC_BLTZALS
= OPC_BLTZAL
| 0x5, /* microMIPS */
239 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
240 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
241 OPC_BGEZALS
= OPC_BGEZAL
| 0x5, /* microMIPS */
242 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
243 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
244 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
245 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
246 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
247 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
248 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
249 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
252 /* Special2 opcodes */
/* Decode key for SPECIAL2-class instructions: major opcode plus the
   function field (bits 5..0).  Fully parenthesized for safe composition. */
#define MASK_SPECIAL2(op)  (MASK_OP_MAJOR(op) | ((op) & 0x3F))
256 /* Multiply & xxx operations */
257 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
258 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
259 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
260 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
261 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
263 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
264 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
265 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
266 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
267 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
268 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
269 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
270 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
271 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
272 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
273 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
274 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
276 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
277 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
278 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
279 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
281 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
284 /* Special3 opcodes */
/* Decode key for SPECIAL3-class instructions: major opcode plus the
   function field (bits 5..0).  Fully parenthesized for safe composition. */
#define MASK_SPECIAL3(op)  (MASK_OP_MAJOR(op) | ((op) & 0x3F))
288 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
289 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
290 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
291 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
292 OPC_INS
= 0x04 | OPC_SPECIAL3
,
293 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
294 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
295 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
296 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
297 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
298 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
299 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
300 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
303 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
304 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
305 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
306 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
307 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
308 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
309 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
310 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
311 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
312 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
313 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
314 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
/* Decode key for BSHFL sub-opcodes: SPECIAL3 key plus the sa field
   (bits 10..6).  Fully parenthesized for safe composition. */
#define MASK_BSHFL(op)     (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
321 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
322 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
323 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
/* Decode key for DBSHFL sub-opcodes: SPECIAL3 key plus the sa field
   (bits 10..6).  Fully parenthesized for safe composition. */
#define MASK_DBSHFL(op)    (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
330 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
331 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
334 /* Coprocessor 0 (rs field) */
/* Decode key for coprocessor 0 instructions: major opcode plus the rs
   field (bits 25..21).  Fully parenthesized for safe composition. */
#define MASK_CP0(op)       (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)))
338 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
339 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
340 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
341 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
342 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
343 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
344 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
345 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
346 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
347 OPC_C0
= (0x10 << 21) | OPC_CP0
,
348 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
349 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
/* Decode key for MFMC0 sub-opcodes (DI/EI/DMT/EMT/...): CP0 key plus the
   low 16 bits of the instruction.  Fully parenthesized for safe composition. */
#define MASK_MFMC0(op)     (MASK_CP0(op) | ((op) & 0xFFFF))
356 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
357 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
358 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
359 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
360 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
361 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
364 /* Coprocessor 0 (with rs == C0) */
/* Decode key for CP0 instructions with rs == C0 (TLB ops, ERET, WAIT...):
   CP0 key plus the function field.  Fully parenthesized for safe composition. */
#define MASK_C0(op)        (MASK_CP0(op) | ((op) & 0x3F))
368 OPC_TLBR
= 0x01 | OPC_C0
,
369 OPC_TLBWI
= 0x02 | OPC_C0
,
370 OPC_TLBWR
= 0x06 | OPC_C0
,
371 OPC_TLBP
= 0x08 | OPC_C0
,
372 OPC_RFE
= 0x10 | OPC_C0
,
373 OPC_ERET
= 0x18 | OPC_C0
,
374 OPC_DERET
= 0x1F | OPC_C0
,
375 OPC_WAIT
= 0x20 | OPC_C0
,
378 /* Coprocessor 1 (rs field) */
/* Decode key for coprocessor 1 (FPU) instructions: major opcode plus the
   rs/fmt field (bits 25..21).  Fully parenthesized for safe composition. */
#define MASK_CP1(op)       (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)))
381 /* Values for the fmt field in FP instructions */
383 /* 0 - 15 are reserved */
384 FMT_S
= 16, /* single fp */
385 FMT_D
= 17, /* double fp */
386 FMT_E
= 18, /* extended fp */
387 FMT_Q
= 19, /* quad fp */
388 FMT_W
= 20, /* 32-bit fixed */
389 FMT_L
= 21, /* 64-bit fixed */
390 FMT_PS
= 22, /* paired single fp */
391 /* 23 - 31 are reserved */
395 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
396 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
397 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
398 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
399 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
400 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
401 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
402 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
403 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
404 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
405 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
406 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
407 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
408 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
409 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
410 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
411 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
412 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
/* Decode key for CP1 arithmetic ops: CP1 key plus the function field
   (bits 5..0).  Fully parenthesized for safe composition. */
#define MASK_CP1_FUNC(op)  (MASK_CP1(op) | ((op) & 0x3F))
/* Decode key for FP branch (BC1x) variants: CP1 key plus the nd/tf bits
   (17..16).  Fully parenthesized for safe composition. */
#define MASK_BC1(op)       (MASK_CP1(op) | ((op) & (0x3 << 16)))
419 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
420 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
421 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
422 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
426 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
427 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
431 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
432 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/* Decode key for coprocessor 2 instructions: major opcode plus the rs
   field (bits 25..21).  Fully parenthesized for safe composition. */
#define MASK_CP2(op)       (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)))
438 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
439 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
440 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
441 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
442 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
443 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
444 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
445 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
446 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
/* Decode key for Loongson multimedia instructions: major opcode, bits
   25..21, and bits 4..0.  The argument is parenthesized at every use so
   an expression argument (e.g. containing `|`) binds correctly. */
#define MASK_LMI(op)  (MASK_OP_MAJOR(op) | ((op) & (0x1F << 21)) | ((op) & 0x1F))
452 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
453 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
454 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
455 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
456 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
457 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
458 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
459 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
461 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
462 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
463 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
464 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
465 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
466 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
467 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
468 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
470 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
471 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
472 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
473 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
474 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
475 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
476 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
477 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
479 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
480 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
481 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
482 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
483 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
484 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
485 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
486 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
488 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
489 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
490 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
491 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
492 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
493 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
495 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
496 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
497 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
498 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
499 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
500 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
502 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
503 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
504 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
505 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
506 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
507 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
509 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
510 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
511 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
512 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
513 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
514 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
516 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
517 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
518 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
519 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
520 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
521 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
523 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
524 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
525 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
526 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
527 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
528 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
530 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
531 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
532 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
533 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
534 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
535 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
537 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
538 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
539 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
540 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
541 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
542 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
/* Decode key for coprocessor 3 (COP1X) instructions: major opcode plus
   the function field (bits 5..0).  Fully parenthesized for safe composition. */
#define MASK_CP3(op)       (MASK_OP_MAJOR(op) | ((op) & 0x3F))
549 OPC_LWXC1
= 0x00 | OPC_CP3
,
550 OPC_LDXC1
= 0x01 | OPC_CP3
,
551 OPC_LUXC1
= 0x05 | OPC_CP3
,
552 OPC_SWXC1
= 0x08 | OPC_CP3
,
553 OPC_SDXC1
= 0x09 | OPC_CP3
,
554 OPC_SUXC1
= 0x0D | OPC_CP3
,
555 OPC_PREFX
= 0x0F | OPC_CP3
,
556 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
557 OPC_MADD_S
= 0x20 | OPC_CP3
,
558 OPC_MADD_D
= 0x21 | OPC_CP3
,
559 OPC_MADD_PS
= 0x26 | OPC_CP3
,
560 OPC_MSUB_S
= 0x28 | OPC_CP3
,
561 OPC_MSUB_D
= 0x29 | OPC_CP3
,
562 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
563 OPC_NMADD_S
= 0x30 | OPC_CP3
,
564 OPC_NMADD_D
= 0x31 | OPC_CP3
,
565 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
566 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
567 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
568 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
571 /* global register indices */
572 static TCGv_ptr cpu_env
;
573 static TCGv cpu_gpr
[32], cpu_PC
;
574 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
], cpu_ACX
[MIPS_DSP_ACC
];
575 static TCGv cpu_dspctrl
, btarget
, bcond
;
576 static TCGv_i32 hflags
;
577 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
578 static TCGv_i64 fpu_f64
[32];
580 static uint32_t gen_opc_hflags
[OPC_BUF_SIZE
];
582 #include "gen-icount.h"
584 #define gen_helper_0e0i(name, arg) do { \
585 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
586 gen_helper_##name(cpu_env, helper_tmp); \
587 tcg_temp_free_i32(helper_tmp); \
590 #define gen_helper_0e1i(name, arg1, arg2) do { \
591 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
592 gen_helper_##name(cpu_env, arg1, helper_tmp); \
593 tcg_temp_free_i32(helper_tmp); \
596 #define gen_helper_1e0i(name, ret, arg1) do { \
597 TCGv_i32 helper_tmp = tcg_const_i32(arg1); \
598 gen_helper_##name(ret, cpu_env, helper_tmp); \
599 tcg_temp_free_i32(helper_tmp); \
602 #define gen_helper_1e1i(name, ret, arg1, arg2) do { \
603 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
604 gen_helper_##name(ret, cpu_env, arg1, helper_tmp); \
605 tcg_temp_free_i32(helper_tmp); \
608 #define gen_helper_0e2i(name, arg1, arg2, arg3) do { \
609 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
610 gen_helper_##name(cpu_env, arg1, arg2, helper_tmp); \
611 tcg_temp_free_i32(helper_tmp); \
614 #define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do { \
615 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
616 gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp); \
617 tcg_temp_free_i32(helper_tmp); \
620 #define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do { \
621 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
622 gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp); \
623 tcg_temp_free_i32(helper_tmp); \
626 typedef struct DisasContext
{
627 struct TranslationBlock
*tb
;
628 target_ulong pc
, saved_pc
;
630 int singlestep_enabled
;
631 /* Routine used to access memory */
633 uint32_t hflags
, saved_hflags
;
635 target_ulong btarget
;
639 BS_NONE
= 0, /* We go out of the TB without reaching a branch or an
640 * exception condition */
641 BS_STOP
= 1, /* We want to stop translation for any reason */
642 BS_BRANCH
= 2, /* We reached a branch condition */
643 BS_EXCP
= 3, /* We reached an exception condition */
646 static const char * const regnames
[] = {
647 "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
648 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
649 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
650 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
653 static const char * const regnames_HI
[] = {
654 "HI0", "HI1", "HI2", "HI3",
657 static const char * const regnames_LO
[] = {
658 "LO0", "LO1", "LO2", "LO3",
661 static const char * const regnames_ACX
[] = {
662 "ACX0", "ACX1", "ACX2", "ACX3",
665 static const char * const fregnames
[] = {
666 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
667 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
668 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
669 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
672 #define MIPS_DEBUG(fmt, ...) \
674 if (MIPS_DEBUG_DISAS) { \
675 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
676 TARGET_FMT_lx ": %08x " fmt "\n", \
677 ctx->pc, ctx->opcode , ## __VA_ARGS__); \
681 #define LOG_DISAS(...) \
683 if (MIPS_DEBUG_DISAS) { \
684 qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__); \
688 #define MIPS_INVAL(op) \
689 MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26, \
690 ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F))
692 /* General purpose registers moves. */
693 static inline void gen_load_gpr (TCGv t
, int reg
)
696 tcg_gen_movi_tl(t
, 0);
698 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
701 static inline void gen_store_gpr (TCGv t
, int reg
)
704 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
707 /* Moves to/from ACX register. */
708 static inline void gen_load_ACX (TCGv t
, int reg
)
710 tcg_gen_mov_tl(t
, cpu_ACX
[reg
]);
713 static inline void gen_store_ACX (TCGv t
, int reg
)
715 tcg_gen_mov_tl(cpu_ACX
[reg
], t
);
718 /* Moves to/from shadow registers. */
719 static inline void gen_load_srsgpr (int from
, int to
)
721 TCGv t0
= tcg_temp_new();
724 tcg_gen_movi_tl(t0
, 0);
726 TCGv_i32 t2
= tcg_temp_new_i32();
727 TCGv_ptr addr
= tcg_temp_new_ptr();
729 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
730 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
731 tcg_gen_andi_i32(t2
, t2
, 0xf);
732 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
733 tcg_gen_ext_i32_ptr(addr
, t2
);
734 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
736 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
737 tcg_temp_free_ptr(addr
);
738 tcg_temp_free_i32(t2
);
740 gen_store_gpr(t0
, to
);
744 static inline void gen_store_srsgpr (int from
, int to
)
747 TCGv t0
= tcg_temp_new();
748 TCGv_i32 t2
= tcg_temp_new_i32();
749 TCGv_ptr addr
= tcg_temp_new_ptr();
751 gen_load_gpr(t0
, from
);
752 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
753 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
754 tcg_gen_andi_i32(t2
, t2
, 0xf);
755 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
756 tcg_gen_ext_i32_ptr(addr
, t2
);
757 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
759 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
760 tcg_temp_free_ptr(addr
);
761 tcg_temp_free_i32(t2
);
766 /* Floating point register moves. */
767 static void gen_load_fpr32(TCGv_i32 t
, int reg
)
769 tcg_gen_trunc_i64_i32(t
, fpu_f64
[reg
]);
772 static void gen_store_fpr32(TCGv_i32 t
, int reg
)
774 TCGv_i64 t64
= tcg_temp_new_i64();
775 tcg_gen_extu_i32_i64(t64
, t
);
776 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
777 tcg_temp_free_i64(t64
);
780 static void gen_load_fpr32h(TCGv_i32 t
, int reg
)
782 TCGv_i64 t64
= tcg_temp_new_i64();
783 tcg_gen_shri_i64(t64
, fpu_f64
[reg
], 32);
784 tcg_gen_trunc_i64_i32(t
, t64
);
785 tcg_temp_free_i64(t64
);
788 static void gen_store_fpr32h(TCGv_i32 t
, int reg
)
790 TCGv_i64 t64
= tcg_temp_new_i64();
791 tcg_gen_extu_i32_i64(t64
, t
);
792 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
793 tcg_temp_free_i64(t64
);
796 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
798 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
799 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
801 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
805 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
807 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
808 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
811 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
812 t0
= tcg_temp_new_i64();
813 tcg_gen_shri_i64(t0
, t
, 32);
814 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
815 tcg_temp_free_i64(t0
);
819 static inline int get_fp_bit (int cc
)
828 static inline void gen_save_pc(target_ulong pc
)
830 tcg_gen_movi_tl(cpu_PC
, pc
);
833 static inline void save_cpu_state (DisasContext
*ctx
, int do_save_pc
)
835 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
836 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
837 gen_save_pc(ctx
->pc
);
838 ctx
->saved_pc
= ctx
->pc
;
840 if (ctx
->hflags
!= ctx
->saved_hflags
) {
841 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
842 ctx
->saved_hflags
= ctx
->hflags
;
843 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
849 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
855 static inline void restore_cpu_state (CPUMIPSState
*env
, DisasContext
*ctx
)
857 ctx
->saved_hflags
= ctx
->hflags
;
858 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
864 ctx
->btarget
= env
->btarget
;
870 generate_exception_err (DisasContext
*ctx
, int excp
, int err
)
872 TCGv_i32 texcp
= tcg_const_i32(excp
);
873 TCGv_i32 terr
= tcg_const_i32(err
);
874 save_cpu_state(ctx
, 1);
875 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
876 tcg_temp_free_i32(terr
);
877 tcg_temp_free_i32(texcp
);
881 generate_exception (DisasContext
*ctx
, int excp
)
883 save_cpu_state(ctx
, 1);
884 gen_helper_0e0i(raise_exception
, excp
);
887 /* Addresses computation */
888 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
890 tcg_gen_add_tl(ret
, arg0
, arg1
);
892 #if defined(TARGET_MIPS64)
893 /* For compatibility with 32-bit code, data reference in user mode
894 with Status_UX = 0 should be casted to 32-bit and sign extended.
895 See the MIPS64 PRA manual, section 4.10. */
896 if (((ctx
->hflags
& MIPS_HFLAG_KSU
) == MIPS_HFLAG_UM
) &&
897 !(ctx
->hflags
& MIPS_HFLAG_UX
)) {
898 tcg_gen_ext32s_i64(ret
, ret
);
903 static inline void check_cp0_enabled(DisasContext
*ctx
)
905 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
906 generate_exception_err(ctx
, EXCP_CpU
, 0);
909 static inline void check_cp1_enabled(DisasContext
*ctx
)
911 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
912 generate_exception_err(ctx
, EXCP_CpU
, 1);
915 /* Verify that the processor is running with COP1X instructions enabled.
916 This is associated with the nabla symbol in the MIPS32 and MIPS64
919 static inline void check_cop1x(DisasContext
*ctx
)
921 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
922 generate_exception(ctx
, EXCP_RI
);
925 /* Verify that the processor is running with 64-bit floating-point
926 operations enabled. */
928 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
930 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
931 generate_exception(ctx
, EXCP_RI
);
935 * Verify if floating point register is valid; an operation is not defined
936 * if bit 0 of any register specification is set and the FR bit in the
937 * Status register equals zero, since the register numbers specify an
938 * even-odd pair of adjacent coprocessor general registers. When the FR bit
939 * in the Status register equals one, both even and odd register numbers
940 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
942 * Multiple 64 bit wide registers can be checked by calling
943 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
945 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
947 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
948 generate_exception(ctx
, EXCP_RI
);
951 /* Verify that the processor is running with DSP instructions enabled.
952 This is enabled by CP0 Status register MX(24) bit.
955 static inline void check_dsp(DisasContext
*ctx
)
957 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
958 generate_exception(ctx
, EXCP_DSPDIS
);
962 static inline void check_dspr2(DisasContext
*ctx
)
964 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
965 generate_exception(ctx
, EXCP_DSPDIS
);
969 /* This code generates a "reserved instruction" exception if the
970 CPU does not support the instruction set corresponding to flags. */
971 static inline void check_insn(CPUMIPSState
*env
, DisasContext
*ctx
, int flags
)
973 if (unlikely(!(env
->insn_flags
& flags
)))
974 generate_exception(ctx
, EXCP_RI
);
977 /* This code generates a "reserved instruction" exception if 64-bit
978 instructions are not enabled. */
979 static inline void check_mips_64(DisasContext
*ctx
)
981 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
982 generate_exception(ctx
, EXCP_RI
);
985 /* Define small wrappers for gen_load_fpr* so that we have a uniform
986 calling interface for 32 and 64-bit FPRs. No sense in changing
987 all callers for gen_load_fpr32 when we need the CTX parameter for
989 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(x, y)
990 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
991 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
992 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
993 int ft, int fs, int cc) \
995 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
996 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
999 check_cp1_64bitmode(ctx); \
1005 check_cp1_registers(ctx, fs | ft); \
1013 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1014 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1016 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1017 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1018 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1019 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1020 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1021 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1022 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1023 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1024 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1025 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1026 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1027 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1028 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1029 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1030 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1031 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1034 tcg_temp_free_i##bits (fp0); \
1035 tcg_temp_free_i##bits (fp1); \
1038 FOP_CONDS(, 0, d
, FMT_D
, 64)
1039 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1040 FOP_CONDS(, 0, s
, FMT_S
, 32)
1041 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1042 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1043 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1045 #undef gen_ldcmp_fpr32
1046 #undef gen_ldcmp_fpr64
1048 /* load/store instructions. */
1049 #define OP_LD(insn,fname) \
1050 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
1052 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
1059 #if defined(TARGET_MIPS64)
1065 #define OP_ST(insn,fname) \
1066 static inline void op_st_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx) \
1068 tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx); \
1073 #if defined(TARGET_MIPS64)
1078 #ifdef CONFIG_USER_ONLY
1079 #define OP_LD_ATOMIC(insn,fname) \
1080 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
1082 TCGv t0 = tcg_temp_new(); \
1083 tcg_gen_mov_tl(t0, arg1); \
1084 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
1085 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
1086 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
1087 tcg_temp_free(t0); \
1090 #define OP_LD_ATOMIC(insn,fname) \
1091 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
1093 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
1096 OP_LD_ATOMIC(ll
,ld32s
);
1097 #if defined(TARGET_MIPS64)
1098 OP_LD_ATOMIC(lld
,ld64
);
1102 #ifdef CONFIG_USER_ONLY
1103 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
1104 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
1106 TCGv t0 = tcg_temp_new(); \
1107 int l1 = gen_new_label(); \
1108 int l2 = gen_new_label(); \
1110 tcg_gen_andi_tl(t0, arg2, almask); \
1111 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
1112 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
1113 generate_exception(ctx, EXCP_AdES); \
1114 gen_set_label(l1); \
1115 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
1116 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
1117 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
1118 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
1119 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
1120 gen_helper_0e0i(raise_exception, EXCP_SC); \
1121 gen_set_label(l2); \
1122 tcg_gen_movi_tl(t0, 0); \
1123 gen_store_gpr(t0, rt); \
1124 tcg_temp_free(t0); \
1127 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
1128 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
1130 TCGv t0 = tcg_temp_new(); \
1131 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
1132 gen_store_gpr(t0, rt); \
1133 tcg_temp_free(t0); \
1136 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
1137 #if defined(TARGET_MIPS64)
1138 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
1142 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
1143 int base
, int16_t offset
)
1146 tcg_gen_movi_tl(addr
, offset
);
1147 } else if (offset
== 0) {
1148 gen_load_gpr(addr
, base
);
1150 tcg_gen_movi_tl(addr
, offset
);
1151 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
1155 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
1157 target_ulong pc
= ctx
->pc
;
1159 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
1160 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
1165 pc
&= ~(target_ulong
)3;
1170 static void gen_ld (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1171 int rt
, int base
, int16_t offset
)
1173 const char *opn
= "ld";
1176 if (rt
== 0 && env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
1177 /* Loongson CPU uses a load to zero register for prefetch.
1178 We emulate it as a NOP. On other CPU we must perform the
1179 actual memory access. */
1184 t0
= tcg_temp_new();
1185 t1
= tcg_temp_new();
1186 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1189 #if defined(TARGET_MIPS64)
1191 save_cpu_state(ctx
, 0);
1192 op_ld_lwu(t0
, t0
, ctx
);
1193 gen_store_gpr(t0
, rt
);
1197 save_cpu_state(ctx
, 0);
1198 op_ld_ld(t0
, t0
, ctx
);
1199 gen_store_gpr(t0
, rt
);
1203 save_cpu_state(ctx
, 1);
1204 op_ld_lld(t0
, t0
, ctx
);
1205 gen_store_gpr(t0
, rt
);
1209 save_cpu_state(ctx
, 1);
1210 gen_load_gpr(t1
, rt
);
1211 gen_helper_1e2i(ldl
, t1
, t1
, t0
, ctx
->mem_idx
);
1212 gen_store_gpr(t1
, rt
);
1216 save_cpu_state(ctx
, 1);
1217 gen_load_gpr(t1
, rt
);
1218 gen_helper_1e2i(ldr
, t1
, t1
, t0
, ctx
->mem_idx
);
1219 gen_store_gpr(t1
, rt
);
1223 save_cpu_state(ctx
, 0);
1224 tcg_gen_movi_tl(t1
, pc_relative_pc(ctx
));
1225 gen_op_addr_add(ctx
, t0
, t0
, t1
);
1226 op_ld_ld(t0
, t0
, ctx
);
1227 gen_store_gpr(t0
, rt
);
1232 save_cpu_state(ctx
, 0);
1233 tcg_gen_movi_tl(t1
, pc_relative_pc(ctx
));
1234 gen_op_addr_add(ctx
, t0
, t0
, t1
);
1235 op_ld_lw(t0
, t0
, ctx
);
1236 gen_store_gpr(t0
, rt
);
1240 save_cpu_state(ctx
, 0);
1241 op_ld_lw(t0
, t0
, ctx
);
1242 gen_store_gpr(t0
, rt
);
1246 save_cpu_state(ctx
, 0);
1247 op_ld_lh(t0
, t0
, ctx
);
1248 gen_store_gpr(t0
, rt
);
1252 save_cpu_state(ctx
, 0);
1253 op_ld_lhu(t0
, t0
, ctx
);
1254 gen_store_gpr(t0
, rt
);
1258 save_cpu_state(ctx
, 0);
1259 op_ld_lb(t0
, t0
, ctx
);
1260 gen_store_gpr(t0
, rt
);
1264 save_cpu_state(ctx
, 0);
1265 op_ld_lbu(t0
, t0
, ctx
);
1266 gen_store_gpr(t0
, rt
);
1270 save_cpu_state(ctx
, 1);
1271 gen_load_gpr(t1
, rt
);
1272 gen_helper_1e2i(lwl
, t1
, t1
, t0
, ctx
->mem_idx
);
1273 gen_store_gpr(t1
, rt
);
1277 save_cpu_state(ctx
, 1);
1278 gen_load_gpr(t1
, rt
);
1279 gen_helper_1e2i(lwr
, t1
, t1
, t0
, ctx
->mem_idx
);
1280 gen_store_gpr(t1
, rt
);
1284 save_cpu_state(ctx
, 1);
1285 op_ld_ll(t0
, t0
, ctx
);
1286 gen_store_gpr(t0
, rt
);
1290 (void)opn
; /* avoid a compiler warning */
1291 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1297 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
1298 int base
, int16_t offset
)
1300 const char *opn
= "st";
1301 TCGv t0
= tcg_temp_new();
1302 TCGv t1
= tcg_temp_new();
1304 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1305 gen_load_gpr(t1
, rt
);
1307 #if defined(TARGET_MIPS64)
1309 save_cpu_state(ctx
, 0);
1310 op_st_sd(t1
, t0
, ctx
);
1314 save_cpu_state(ctx
, 1);
1315 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
1319 save_cpu_state(ctx
, 1);
1320 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
1325 save_cpu_state(ctx
, 0);
1326 op_st_sw(t1
, t0
, ctx
);
1330 save_cpu_state(ctx
, 0);
1331 op_st_sh(t1
, t0
, ctx
);
1335 save_cpu_state(ctx
, 0);
1336 op_st_sb(t1
, t0
, ctx
);
1340 save_cpu_state(ctx
, 1);
1341 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
1345 save_cpu_state(ctx
, 1);
1346 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
1350 (void)opn
; /* avoid a compiler warning */
1351 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1357 /* Store conditional */
1358 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
1359 int base
, int16_t offset
)
1361 const char *opn
= "st_cond";
1364 t0
= tcg_temp_local_new();
1366 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1367 /* Don't do NOP if destination is zero: we must perform the actual
1370 t1
= tcg_temp_local_new();
1371 gen_load_gpr(t1
, rt
);
1373 #if defined(TARGET_MIPS64)
1375 save_cpu_state(ctx
, 1);
1376 op_st_scd(t1
, t0
, rt
, ctx
);
1381 save_cpu_state(ctx
, 1);
1382 op_st_sc(t1
, t0
, rt
, ctx
);
1386 (void)opn
; /* avoid a compiler warning */
1387 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1392 /* Load and store */
1393 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
1394 int base
, int16_t offset
)
1396 const char *opn
= "flt_ldst";
1397 TCGv t0
= tcg_temp_new();
1399 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1400 /* Don't do NOP if destination is zero: we must perform the actual
1405 TCGv_i32 fp0
= tcg_temp_new_i32();
1407 tcg_gen_qemu_ld32s(t0
, t0
, ctx
->mem_idx
);
1408 tcg_gen_trunc_tl_i32(fp0
, t0
);
1409 gen_store_fpr32(fp0
, ft
);
1410 tcg_temp_free_i32(fp0
);
1416 TCGv_i32 fp0
= tcg_temp_new_i32();
1417 TCGv t1
= tcg_temp_new();
1419 gen_load_fpr32(fp0
, ft
);
1420 tcg_gen_extu_i32_tl(t1
, fp0
);
1421 tcg_gen_qemu_st32(t1
, t0
, ctx
->mem_idx
);
1423 tcg_temp_free_i32(fp0
);
1429 TCGv_i64 fp0
= tcg_temp_new_i64();
1431 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
1432 gen_store_fpr64(ctx
, fp0
, ft
);
1433 tcg_temp_free_i64(fp0
);
1439 TCGv_i64 fp0
= tcg_temp_new_i64();
1441 gen_load_fpr64(ctx
, fp0
, ft
);
1442 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
1443 tcg_temp_free_i64(fp0
);
1449 generate_exception(ctx
, EXCP_RI
);
1452 (void)opn
; /* avoid a compiler warning */
1453 MIPS_DEBUG("%s %s, %d(%s)", opn
, fregnames
[ft
], offset
, regnames
[base
]);
1458 static void gen_cop1_ldst(CPUMIPSState
*env
, DisasContext
*ctx
,
1459 uint32_t op
, int rt
, int rs
, int16_t imm
)
1461 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
1462 check_cp1_enabled(ctx
);
1463 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
1465 generate_exception_err(ctx
, EXCP_CpU
, 1);
1469 /* Arithmetic with immediate operand */
1470 static void gen_arith_imm (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1471 int rt
, int rs
, int16_t imm
)
1473 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
1474 const char *opn
= "imm arith";
1476 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
1477 /* If no destination, treat it as a NOP.
1478 For addi, we must generate the overflow exception when needed. */
1485 TCGv t0
= tcg_temp_local_new();
1486 TCGv t1
= tcg_temp_new();
1487 TCGv t2
= tcg_temp_new();
1488 int l1
= gen_new_label();
1490 gen_load_gpr(t1
, rs
);
1491 tcg_gen_addi_tl(t0
, t1
, uimm
);
1492 tcg_gen_ext32s_tl(t0
, t0
);
1494 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
1495 tcg_gen_xori_tl(t2
, t0
, uimm
);
1496 tcg_gen_and_tl(t1
, t1
, t2
);
1498 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1500 /* operands of same sign, result different sign */
1501 generate_exception(ctx
, EXCP_OVERFLOW
);
1503 tcg_gen_ext32s_tl(t0
, t0
);
1504 gen_store_gpr(t0
, rt
);
1511 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1512 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
1514 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1518 #if defined(TARGET_MIPS64)
1521 TCGv t0
= tcg_temp_local_new();
1522 TCGv t1
= tcg_temp_new();
1523 TCGv t2
= tcg_temp_new();
1524 int l1
= gen_new_label();
1526 gen_load_gpr(t1
, rs
);
1527 tcg_gen_addi_tl(t0
, t1
, uimm
);
1529 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
1530 tcg_gen_xori_tl(t2
, t0
, uimm
);
1531 tcg_gen_and_tl(t1
, t1
, t2
);
1533 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1535 /* operands of same sign, result different sign */
1536 generate_exception(ctx
, EXCP_OVERFLOW
);
1538 gen_store_gpr(t0
, rt
);
1545 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1547 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1553 (void)opn
; /* avoid a compiler warning */
1554 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1557 /* Logic with immediate operand */
1558 static void gen_logic_imm(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1559 int rt
, int rs
, int16_t imm
)
1562 const char *opn
= "imm logic";
1565 /* If no destination, treat it as a NOP. */
1569 uimm
= (uint16_t)imm
;
1572 if (likely(rs
!= 0))
1573 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1575 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
1580 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1582 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1586 if (likely(rs
!= 0))
1587 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1589 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1593 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
1597 (void)opn
; /* avoid a compiler warning */
1598 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1601 /* Set on less than with immediate operand */
1602 static void gen_slt_imm(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1603 int rt
, int rs
, int16_t imm
)
1605 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
1606 const char *opn
= "imm arith";
1610 /* If no destination, treat it as a NOP. */
1614 t0
= tcg_temp_new();
1615 gen_load_gpr(t0
, rs
);
1618 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
1622 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
1626 (void)opn
; /* avoid a compiler warning */
1627 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1631 /* Shifts with immediate operand */
1632 static void gen_shift_imm(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1633 int rt
, int rs
, int16_t imm
)
1635 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
1636 const char *opn
= "imm shift";
1640 /* If no destination, treat it as a NOP. */
1645 t0
= tcg_temp_new();
1646 gen_load_gpr(t0
, rs
);
1649 tcg_gen_shli_tl(t0
, t0
, uimm
);
1650 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1654 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
1659 tcg_gen_ext32u_tl(t0
, t0
);
1660 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
1662 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1668 TCGv_i32 t1
= tcg_temp_new_i32();
1670 tcg_gen_trunc_tl_i32(t1
, t0
);
1671 tcg_gen_rotri_i32(t1
, t1
, uimm
);
1672 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
1673 tcg_temp_free_i32(t1
);
1675 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1679 #if defined(TARGET_MIPS64)
1681 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
1685 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
1689 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
1694 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
1696 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
1701 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1705 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1709 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1713 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1718 (void)opn
; /* avoid a compiler warning */
1719 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1724 static void gen_arith (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1725 int rd
, int rs
, int rt
)
1727 const char *opn
= "arith";
1729 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
1730 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
1731 /* If no destination, treat it as a NOP.
1732 For add & sub, we must generate the overflow exception when needed. */
1740 TCGv t0
= tcg_temp_local_new();
1741 TCGv t1
= tcg_temp_new();
1742 TCGv t2
= tcg_temp_new();
1743 int l1
= gen_new_label();
1745 gen_load_gpr(t1
, rs
);
1746 gen_load_gpr(t2
, rt
);
1747 tcg_gen_add_tl(t0
, t1
, t2
);
1748 tcg_gen_ext32s_tl(t0
, t0
);
1749 tcg_gen_xor_tl(t1
, t1
, t2
);
1750 tcg_gen_xor_tl(t2
, t0
, t2
);
1751 tcg_gen_andc_tl(t1
, t2
, t1
);
1753 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1755 /* operands of same sign, result different sign */
1756 generate_exception(ctx
, EXCP_OVERFLOW
);
1758 gen_store_gpr(t0
, rd
);
1764 if (rs
!= 0 && rt
!= 0) {
1765 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1766 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1767 } else if (rs
== 0 && rt
!= 0) {
1768 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1769 } else if (rs
!= 0 && rt
== 0) {
1770 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1772 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1778 TCGv t0
= tcg_temp_local_new();
1779 TCGv t1
= tcg_temp_new();
1780 TCGv t2
= tcg_temp_new();
1781 int l1
= gen_new_label();
1783 gen_load_gpr(t1
, rs
);
1784 gen_load_gpr(t2
, rt
);
1785 tcg_gen_sub_tl(t0
, t1
, t2
);
1786 tcg_gen_ext32s_tl(t0
, t0
);
1787 tcg_gen_xor_tl(t2
, t1
, t2
);
1788 tcg_gen_xor_tl(t1
, t0
, t1
);
1789 tcg_gen_and_tl(t1
, t1
, t2
);
1791 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1793 /* operands of different sign, first operand and result different sign */
1794 generate_exception(ctx
, EXCP_OVERFLOW
);
1796 gen_store_gpr(t0
, rd
);
1802 if (rs
!= 0 && rt
!= 0) {
1803 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1804 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1805 } else if (rs
== 0 && rt
!= 0) {
1806 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1807 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1808 } else if (rs
!= 0 && rt
== 0) {
1809 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1811 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1815 #if defined(TARGET_MIPS64)
1818 TCGv t0
= tcg_temp_local_new();
1819 TCGv t1
= tcg_temp_new();
1820 TCGv t2
= tcg_temp_new();
1821 int l1
= gen_new_label();
1823 gen_load_gpr(t1
, rs
);
1824 gen_load_gpr(t2
, rt
);
1825 tcg_gen_add_tl(t0
, t1
, t2
);
1826 tcg_gen_xor_tl(t1
, t1
, t2
);
1827 tcg_gen_xor_tl(t2
, t0
, t2
);
1828 tcg_gen_andc_tl(t1
, t2
, t1
);
1830 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1832 /* operands of same sign, result different sign */
1833 generate_exception(ctx
, EXCP_OVERFLOW
);
1835 gen_store_gpr(t0
, rd
);
1841 if (rs
!= 0 && rt
!= 0) {
1842 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1843 } else if (rs
== 0 && rt
!= 0) {
1844 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1845 } else if (rs
!= 0 && rt
== 0) {
1846 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1848 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1854 TCGv t0
= tcg_temp_local_new();
1855 TCGv t1
= tcg_temp_new();
1856 TCGv t2
= tcg_temp_new();
1857 int l1
= gen_new_label();
1859 gen_load_gpr(t1
, rs
);
1860 gen_load_gpr(t2
, rt
);
1861 tcg_gen_sub_tl(t0
, t1
, t2
);
1862 tcg_gen_xor_tl(t2
, t1
, t2
);
1863 tcg_gen_xor_tl(t1
, t0
, t1
);
1864 tcg_gen_and_tl(t1
, t1
, t2
);
1866 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1868 /* operands of different sign, first operand and result different sign */
1869 generate_exception(ctx
, EXCP_OVERFLOW
);
1871 gen_store_gpr(t0
, rd
);
1877 if (rs
!= 0 && rt
!= 0) {
1878 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1879 } else if (rs
== 0 && rt
!= 0) {
1880 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1881 } else if (rs
!= 0 && rt
== 0) {
1882 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1884 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1890 if (likely(rs
!= 0 && rt
!= 0)) {
1891 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1892 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1894 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1899 (void)opn
; /* avoid a compiler warning */
1900 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1903 /* Conditional move */
1904 static void gen_cond_move(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1905 int rd
, int rs
, int rt
)
1907 const char *opn
= "cond move";
1911 /* If no destination, treat it as a NOP.
1912 For add & sub, we must generate the overflow exception when needed. */
1917 l1
= gen_new_label();
1920 if (likely(rt
!= 0))
1921 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[rt
], 0, l1
);
1927 if (likely(rt
!= 0))
1928 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[rt
], 0, l1
);
1933 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1935 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1938 (void)opn
; /* avoid a compiler warning */
1939 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1943 static void gen_logic(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
1944 int rd
, int rs
, int rt
)
1946 const char *opn
= "logic";
1949 /* If no destination, treat it as a NOP. */
1956 if (likely(rs
!= 0 && rt
!= 0)) {
1957 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1959 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1964 if (rs
!= 0 && rt
!= 0) {
1965 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1966 } else if (rs
== 0 && rt
!= 0) {
1967 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1968 } else if (rs
!= 0 && rt
== 0) {
1969 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1971 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
1976 if (likely(rs
!= 0 && rt
!= 0)) {
1977 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1978 } else if (rs
== 0 && rt
!= 0) {
1979 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1980 } else if (rs
!= 0 && rt
== 0) {
1981 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1983 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1988 if (likely(rs
!= 0 && rt
!= 0)) {
1989 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1990 } else if (rs
== 0 && rt
!= 0) {
1991 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1992 } else if (rs
!= 0 && rt
== 0) {
1993 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1995 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2000 (void)opn
; /* avoid a compiler warning */
2001 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2004 /* Set on lower than */
2005 static void gen_slt(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
2006 int rd
, int rs
, int rt
)
2008 const char *opn
= "slt";
2012 /* If no destination, treat it as a NOP. */
2017 t0
= tcg_temp_new();
2018 t1
= tcg_temp_new();
2019 gen_load_gpr(t0
, rs
);
2020 gen_load_gpr(t1
, rt
);
2023 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2027 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2031 (void)opn
; /* avoid a compiler warning */
2032 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2038 static void gen_shift (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
2039 int rd
, int rs
, int rt
)
2041 const char *opn
= "shifts";
2045 /* If no destination, treat it as a NOP.
2046 For add & sub, we must generate the overflow exception when needed. */
2051 t0
= tcg_temp_new();
2052 t1
= tcg_temp_new();
2053 gen_load_gpr(t0
, rs
);
2054 gen_load_gpr(t1
, rt
);
2057 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2058 tcg_gen_shl_tl(t0
, t1
, t0
);
2059 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2063 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2064 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2068 tcg_gen_ext32u_tl(t1
, t1
);
2069 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2070 tcg_gen_shr_tl(t0
, t1
, t0
);
2071 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2076 TCGv_i32 t2
= tcg_temp_new_i32();
2077 TCGv_i32 t3
= tcg_temp_new_i32();
2079 tcg_gen_trunc_tl_i32(t2
, t0
);
2080 tcg_gen_trunc_tl_i32(t3
, t1
);
2081 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2082 tcg_gen_rotr_i32(t2
, t3
, t2
);
2083 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2084 tcg_temp_free_i32(t2
);
2085 tcg_temp_free_i32(t3
);
2089 #if defined(TARGET_MIPS64)
2091 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2092 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2096 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2097 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2101 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2102 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2106 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2107 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
2112 (void)opn
; /* avoid a compiler warning */
2113 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2118 /* Arithmetic on HI/LO registers */
2119 static void gen_HILO (DisasContext
*ctx
, uint32_t opc
, int reg
)
2121 const char *opn
= "hilo";
2123 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
2130 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[0]);
2134 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[0]);
2139 tcg_gen_mov_tl(cpu_HI
[0], cpu_gpr
[reg
]);
2141 tcg_gen_movi_tl(cpu_HI
[0], 0);
2146 tcg_gen_mov_tl(cpu_LO
[0], cpu_gpr
[reg
]);
2148 tcg_gen_movi_tl(cpu_LO
[0], 0);
2152 (void)opn
; /* avoid a compiler warning */
2153 MIPS_DEBUG("%s %s", opn
, regnames
[reg
]);
2156 static void gen_muldiv (DisasContext
*ctx
, uint32_t opc
,
2159 const char *opn
= "mul/div";
2165 #if defined(TARGET_MIPS64)
2169 t0
= tcg_temp_local_new();
2170 t1
= tcg_temp_local_new();
2173 t0
= tcg_temp_new();
2174 t1
= tcg_temp_new();
2178 gen_load_gpr(t0
, rs
);
2179 gen_load_gpr(t1
, rt
);
2183 int l1
= gen_new_label();
2184 int l2
= gen_new_label();
2186 tcg_gen_ext32s_tl(t0
, t0
);
2187 tcg_gen_ext32s_tl(t1
, t1
);
2188 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2189 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2190 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2192 tcg_gen_mov_tl(cpu_LO
[0], t0
);
2193 tcg_gen_movi_tl(cpu_HI
[0], 0);
2196 tcg_gen_div_tl(cpu_LO
[0], t0
, t1
);
2197 tcg_gen_rem_tl(cpu_HI
[0], t0
, t1
);
2198 tcg_gen_ext32s_tl(cpu_LO
[0], cpu_LO
[0]);
2199 tcg_gen_ext32s_tl(cpu_HI
[0], cpu_HI
[0]);
2206 int l1
= gen_new_label();
2208 tcg_gen_ext32u_tl(t0
, t0
);
2209 tcg_gen_ext32u_tl(t1
, t1
);
2210 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2211 tcg_gen_divu_tl(cpu_LO
[0], t0
, t1
);
2212 tcg_gen_remu_tl(cpu_HI
[0], t0
, t1
);
2213 tcg_gen_ext32s_tl(cpu_LO
[0], cpu_LO
[0]);
2214 tcg_gen_ext32s_tl(cpu_HI
[0], cpu_HI
[0]);
2221 TCGv_i64 t2
= tcg_temp_new_i64();
2222 TCGv_i64 t3
= tcg_temp_new_i64();
2224 tcg_gen_ext_tl_i64(t2
, t0
);
2225 tcg_gen_ext_tl_i64(t3
, t1
);
2226 tcg_gen_mul_i64(t2
, t2
, t3
);
2227 tcg_temp_free_i64(t3
);
2228 tcg_gen_trunc_i64_tl(t0
, t2
);
2229 tcg_gen_shri_i64(t2
, t2
, 32);
2230 tcg_gen_trunc_i64_tl(t1
, t2
);
2231 tcg_temp_free_i64(t2
);
2232 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2233 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2239 TCGv_i64 t2
= tcg_temp_new_i64();
2240 TCGv_i64 t3
= tcg_temp_new_i64();
2242 tcg_gen_ext32u_tl(t0
, t0
);
2243 tcg_gen_ext32u_tl(t1
, t1
);
2244 tcg_gen_extu_tl_i64(t2
, t0
);
2245 tcg_gen_extu_tl_i64(t3
, t1
);
2246 tcg_gen_mul_i64(t2
, t2
, t3
);
2247 tcg_temp_free_i64(t3
);
2248 tcg_gen_trunc_i64_tl(t0
, t2
);
2249 tcg_gen_shri_i64(t2
, t2
, 32);
2250 tcg_gen_trunc_i64_tl(t1
, t2
);
2251 tcg_temp_free_i64(t2
);
2252 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2253 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2257 #if defined(TARGET_MIPS64)
2260 int l1
= gen_new_label();
2261 int l2
= gen_new_label();
2263 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2264 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2265 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2266 tcg_gen_mov_tl(cpu_LO
[0], t0
);
2267 tcg_gen_movi_tl(cpu_HI
[0], 0);
2270 tcg_gen_div_i64(cpu_LO
[0], t0
, t1
);
2271 tcg_gen_rem_i64(cpu_HI
[0], t0
, t1
);
2278 int l1
= gen_new_label();
2280 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2281 tcg_gen_divu_i64(cpu_LO
[0], t0
, t1
);
2282 tcg_gen_remu_i64(cpu_HI
[0], t0
, t1
);
2288 gen_helper_dmult(cpu_env
, t0
, t1
);
2292 gen_helper_dmultu(cpu_env
, t0
, t1
);
2298 TCGv_i64 t2
= tcg_temp_new_i64();
2299 TCGv_i64 t3
= tcg_temp_new_i64();
2301 tcg_gen_ext_tl_i64(t2
, t0
);
2302 tcg_gen_ext_tl_i64(t3
, t1
);
2303 tcg_gen_mul_i64(t2
, t2
, t3
);
2304 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2305 tcg_gen_add_i64(t2
, t2
, t3
);
2306 tcg_temp_free_i64(t3
);
2307 tcg_gen_trunc_i64_tl(t0
, t2
);
2308 tcg_gen_shri_i64(t2
, t2
, 32);
2309 tcg_gen_trunc_i64_tl(t1
, t2
);
2310 tcg_temp_free_i64(t2
);
2311 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2312 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2318 TCGv_i64 t2
= tcg_temp_new_i64();
2319 TCGv_i64 t3
= tcg_temp_new_i64();
2321 tcg_gen_ext32u_tl(t0
, t0
);
2322 tcg_gen_ext32u_tl(t1
, t1
);
2323 tcg_gen_extu_tl_i64(t2
, t0
);
2324 tcg_gen_extu_tl_i64(t3
, t1
);
2325 tcg_gen_mul_i64(t2
, t2
, t3
);
2326 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2327 tcg_gen_add_i64(t2
, t2
, t3
);
2328 tcg_temp_free_i64(t3
);
2329 tcg_gen_trunc_i64_tl(t0
, t2
);
2330 tcg_gen_shri_i64(t2
, t2
, 32);
2331 tcg_gen_trunc_i64_tl(t1
, t2
);
2332 tcg_temp_free_i64(t2
);
2333 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2334 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2340 TCGv_i64 t2
= tcg_temp_new_i64();
2341 TCGv_i64 t3
= tcg_temp_new_i64();
2343 tcg_gen_ext_tl_i64(t2
, t0
);
2344 tcg_gen_ext_tl_i64(t3
, t1
);
2345 tcg_gen_mul_i64(t2
, t2
, t3
);
2346 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2347 tcg_gen_sub_i64(t2
, t3
, t2
);
2348 tcg_temp_free_i64(t3
);
2349 tcg_gen_trunc_i64_tl(t0
, t2
);
2350 tcg_gen_shri_i64(t2
, t2
, 32);
2351 tcg_gen_trunc_i64_tl(t1
, t2
);
2352 tcg_temp_free_i64(t2
);
2353 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2354 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2360 TCGv_i64 t2
= tcg_temp_new_i64();
2361 TCGv_i64 t3
= tcg_temp_new_i64();
2363 tcg_gen_ext32u_tl(t0
, t0
);
2364 tcg_gen_ext32u_tl(t1
, t1
);
2365 tcg_gen_extu_tl_i64(t2
, t0
);
2366 tcg_gen_extu_tl_i64(t3
, t1
);
2367 tcg_gen_mul_i64(t2
, t2
, t3
);
2368 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2369 tcg_gen_sub_i64(t2
, t3
, t2
);
2370 tcg_temp_free_i64(t3
);
2371 tcg_gen_trunc_i64_tl(t0
, t2
);
2372 tcg_gen_shri_i64(t2
, t2
, 32);
2373 tcg_gen_trunc_i64_tl(t1
, t2
);
2374 tcg_temp_free_i64(t2
);
2375 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2376 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2382 generate_exception(ctx
, EXCP_RI
);
2385 (void)opn
; /* avoid a compiler warning */
2386 MIPS_DEBUG("%s %s %s", opn
, regnames
[rs
], regnames
[rt
]);
2392 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
2393 int rd
, int rs
, int rt
)
2395 const char *opn
= "mul vr54xx";
2396 TCGv t0
= tcg_temp_new();
2397 TCGv t1
= tcg_temp_new();
2399 gen_load_gpr(t0
, rs
);
2400 gen_load_gpr(t1
, rt
);
2403 case OPC_VR54XX_MULS
:
2404 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
2407 case OPC_VR54XX_MULSU
:
2408 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
2411 case OPC_VR54XX_MACC
:
2412 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
2415 case OPC_VR54XX_MACCU
:
2416 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
2419 case OPC_VR54XX_MSAC
:
2420 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
2423 case OPC_VR54XX_MSACU
:
2424 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
2427 case OPC_VR54XX_MULHI
:
2428 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
2431 case OPC_VR54XX_MULHIU
:
2432 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
2435 case OPC_VR54XX_MULSHI
:
2436 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
2439 case OPC_VR54XX_MULSHIU
:
2440 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
2443 case OPC_VR54XX_MACCHI
:
2444 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
2447 case OPC_VR54XX_MACCHIU
:
2448 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
2451 case OPC_VR54XX_MSACHI
:
2452 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
2455 case OPC_VR54XX_MSACHIU
:
2456 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
2460 MIPS_INVAL("mul vr54xx");
2461 generate_exception(ctx
, EXCP_RI
);
2464 gen_store_gpr(t0
, rd
);
2465 (void)opn
; /* avoid a compiler warning */
2466 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2473 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
2476 const char *opn
= "CLx";
2484 t0
= tcg_temp_new();
2485 gen_load_gpr(t0
, rs
);
2488 gen_helper_clo(cpu_gpr
[rd
], t0
);
2492 gen_helper_clz(cpu_gpr
[rd
], t0
);
2495 #if defined(TARGET_MIPS64)
2497 gen_helper_dclo(cpu_gpr
[rd
], t0
);
2501 gen_helper_dclz(cpu_gpr
[rd
], t0
);
2506 (void)opn
; /* avoid a compiler warning */
2507 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
2511 /* Godson integer instructions */
2512 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
2513 int rd
, int rs
, int rt
)
2515 const char *opn
= "loongson";
2527 case OPC_MULTU_G_2E
:
2528 case OPC_MULTU_G_2F
:
2529 #if defined(TARGET_MIPS64)
2530 case OPC_DMULT_G_2E
:
2531 case OPC_DMULT_G_2F
:
2532 case OPC_DMULTU_G_2E
:
2533 case OPC_DMULTU_G_2F
:
2535 t0
= tcg_temp_new();
2536 t1
= tcg_temp_new();
2539 t0
= tcg_temp_local_new();
2540 t1
= tcg_temp_local_new();
2544 gen_load_gpr(t0
, rs
);
2545 gen_load_gpr(t1
, rt
);
2550 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2551 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2554 case OPC_MULTU_G_2E
:
2555 case OPC_MULTU_G_2F
:
2556 tcg_gen_ext32u_tl(t0
, t0
);
2557 tcg_gen_ext32u_tl(t1
, t1
);
2558 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2559 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2565 int l1
= gen_new_label();
2566 int l2
= gen_new_label();
2567 int l3
= gen_new_label();
2568 tcg_gen_ext32s_tl(t0
, t0
);
2569 tcg_gen_ext32s_tl(t1
, t1
);
2570 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2571 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2574 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2575 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2576 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
2579 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
2580 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2588 int l1
= gen_new_label();
2589 int l2
= gen_new_label();
2590 tcg_gen_ext32u_tl(t0
, t0
);
2591 tcg_gen_ext32u_tl(t1
, t1
);
2592 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2593 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2596 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
2597 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2605 int l1
= gen_new_label();
2606 int l2
= gen_new_label();
2607 int l3
= gen_new_label();
2608 tcg_gen_ext32u_tl(t0
, t0
);
2609 tcg_gen_ext32u_tl(t1
, t1
);
2610 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2611 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
2612 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
2614 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2617 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
2618 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2626 int l1
= gen_new_label();
2627 int l2
= gen_new_label();
2628 tcg_gen_ext32u_tl(t0
, t0
);
2629 tcg_gen_ext32u_tl(t1
, t1
);
2630 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2631 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2634 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
2635 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2640 #if defined(TARGET_MIPS64)
2641 case OPC_DMULT_G_2E
:
2642 case OPC_DMULT_G_2F
:
2643 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2646 case OPC_DMULTU_G_2E
:
2647 case OPC_DMULTU_G_2F
:
2648 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
2654 int l1
= gen_new_label();
2655 int l2
= gen_new_label();
2656 int l3
= gen_new_label();
2657 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2658 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2661 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2662 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2663 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
2666 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
2671 case OPC_DDIVU_G_2E
:
2672 case OPC_DDIVU_G_2F
:
2674 int l1
= gen_new_label();
2675 int l2
= gen_new_label();
2676 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2677 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2680 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
2688 int l1
= gen_new_label();
2689 int l2
= gen_new_label();
2690 int l3
= gen_new_label();
2691 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2692 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2693 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2695 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2698 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
2703 case OPC_DMODU_G_2E
:
2704 case OPC_DMODU_G_2F
:
2706 int l1
= gen_new_label();
2707 int l2
= gen_new_label();
2708 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
2709 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2712 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
2720 (void)opn
; /* avoid a compiler warning */
2721 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
2726 /* Loongson multimedia instructions */
2727 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
2729 const char *opn
= "loongson_cp2";
2730 uint32_t opc
, shift_max
;
2733 opc
= MASK_LMI(ctx
->opcode
);
2739 t0
= tcg_temp_local_new_i64();
2740 t1
= tcg_temp_local_new_i64();
2743 t0
= tcg_temp_new_i64();
2744 t1
= tcg_temp_new_i64();
2748 gen_load_fpr64(ctx
, t0
, rs
);
2749 gen_load_fpr64(ctx
, t1
, rt
);
2751 #define LMI_HELPER(UP, LO) \
2752 case OPC_##UP: gen_helper_##LO(t0, t0, t1); opn = #LO; break
2753 #define LMI_HELPER_1(UP, LO) \
2754 case OPC_##UP: gen_helper_##LO(t0, t0); opn = #LO; break
2755 #define LMI_DIRECT(UP, LO, OP) \
2756 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); opn = #LO; break
2759 LMI_HELPER(PADDSH
, paddsh
);
2760 LMI_HELPER(PADDUSH
, paddush
);
2761 LMI_HELPER(PADDH
, paddh
);
2762 LMI_HELPER(PADDW
, paddw
);
2763 LMI_HELPER(PADDSB
, paddsb
);
2764 LMI_HELPER(PADDUSB
, paddusb
);
2765 LMI_HELPER(PADDB
, paddb
);
2767 LMI_HELPER(PSUBSH
, psubsh
);
2768 LMI_HELPER(PSUBUSH
, psubush
);
2769 LMI_HELPER(PSUBH
, psubh
);
2770 LMI_HELPER(PSUBW
, psubw
);
2771 LMI_HELPER(PSUBSB
, psubsb
);
2772 LMI_HELPER(PSUBUSB
, psubusb
);
2773 LMI_HELPER(PSUBB
, psubb
);
2775 LMI_HELPER(PSHUFH
, pshufh
);
2776 LMI_HELPER(PACKSSWH
, packsswh
);
2777 LMI_HELPER(PACKSSHB
, packsshb
);
2778 LMI_HELPER(PACKUSHB
, packushb
);
2780 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
2781 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
2782 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
2783 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
2784 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
2785 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
2787 LMI_HELPER(PAVGH
, pavgh
);
2788 LMI_HELPER(PAVGB
, pavgb
);
2789 LMI_HELPER(PMAXSH
, pmaxsh
);
2790 LMI_HELPER(PMINSH
, pminsh
);
2791 LMI_HELPER(PMAXUB
, pmaxub
);
2792 LMI_HELPER(PMINUB
, pminub
);
2794 LMI_HELPER(PCMPEQW
, pcmpeqw
);
2795 LMI_HELPER(PCMPGTW
, pcmpgtw
);
2796 LMI_HELPER(PCMPEQH
, pcmpeqh
);
2797 LMI_HELPER(PCMPGTH
, pcmpgth
);
2798 LMI_HELPER(PCMPEQB
, pcmpeqb
);
2799 LMI_HELPER(PCMPGTB
, pcmpgtb
);
2801 LMI_HELPER(PSLLW
, psllw
);
2802 LMI_HELPER(PSLLH
, psllh
);
2803 LMI_HELPER(PSRLW
, psrlw
);
2804 LMI_HELPER(PSRLH
, psrlh
);
2805 LMI_HELPER(PSRAW
, psraw
);
2806 LMI_HELPER(PSRAH
, psrah
);
2808 LMI_HELPER(PMULLH
, pmullh
);
2809 LMI_HELPER(PMULHH
, pmulhh
);
2810 LMI_HELPER(PMULHUH
, pmulhuh
);
2811 LMI_HELPER(PMADDHW
, pmaddhw
);
2813 LMI_HELPER(PASUBUB
, pasubub
);
2814 LMI_HELPER_1(BIADD
, biadd
);
2815 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
2817 LMI_DIRECT(PADDD
, paddd
, add
);
2818 LMI_DIRECT(PSUBD
, psubd
, sub
);
2819 LMI_DIRECT(XOR_CP2
, xor, xor);
2820 LMI_DIRECT(NOR_CP2
, nor
, nor
);
2821 LMI_DIRECT(AND_CP2
, and, and);
2822 LMI_DIRECT(PANDN
, pandn
, andc
);
2823 LMI_DIRECT(OR
, or, or);
2826 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
2830 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
2834 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
2838 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
2843 tcg_gen_andi_i64(t1
, t1
, 3);
2844 tcg_gen_shli_i64(t1
, t1
, 4);
2845 tcg_gen_shr_i64(t0
, t0
, t1
);
2846 tcg_gen_ext16u_i64(t0
, t0
);
2851 tcg_gen_add_i64(t0
, t0
, t1
);
2852 tcg_gen_ext32s_i64(t0
, t0
);
2856 tcg_gen_sub_i64(t0
, t0
, t1
);
2857 tcg_gen_ext32s_i64(t0
, t0
);
2886 /* Make sure shift count isn't TCG undefined behaviour. */
2887 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
2892 tcg_gen_shl_i64(t0
, t0
, t1
);
2896 /* Since SRA is UndefinedResult without sign-extended inputs,
2897 we can treat SRA and DSRA the same. */
2898 tcg_gen_sar_i64(t0
, t0
, t1
);
2901 /* We want to shift in zeros for SRL; zero-extend first. */
2902 tcg_gen_ext32u_i64(t0
, t0
);
2905 tcg_gen_shr_i64(t0
, t0
, t1
);
2909 if (shift_max
== 32) {
2910 tcg_gen_ext32s_i64(t0
, t0
);
2913 /* Shifts larger than MAX produce zero. */
2914 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
2915 tcg_gen_neg_i64(t1
, t1
);
2916 tcg_gen_and_i64(t0
, t0
, t1
);
2922 TCGv_i64 t2
= tcg_temp_new_i64();
2923 int lab
= gen_new_label();
2925 tcg_gen_mov_i64(t2
, t0
);
2926 tcg_gen_add_i64(t0
, t1
, t2
);
2927 if (opc
== OPC_ADD_CP2
) {
2928 tcg_gen_ext32s_i64(t0
, t0
);
2930 tcg_gen_xor_i64(t1
, t1
, t2
);
2931 tcg_gen_xor_i64(t2
, t2
, t0
);
2932 tcg_gen_andc_i64(t1
, t2
, t1
);
2933 tcg_temp_free_i64(t2
);
2934 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
2935 generate_exception(ctx
, EXCP_OVERFLOW
);
2938 opn
= (opc
== OPC_ADD_CP2
? "add" : "dadd");
2945 TCGv_i64 t2
= tcg_temp_new_i64();
2946 int lab
= gen_new_label();
2948 tcg_gen_mov_i64(t2
, t0
);
2949 tcg_gen_sub_i64(t0
, t1
, t2
);
2950 if (opc
== OPC_SUB_CP2
) {
2951 tcg_gen_ext32s_i64(t0
, t0
);
2953 tcg_gen_xor_i64(t1
, t1
, t2
);
2954 tcg_gen_xor_i64(t2
, t2
, t0
);
2955 tcg_gen_and_i64(t1
, t1
, t2
);
2956 tcg_temp_free_i64(t2
);
2957 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
2958 generate_exception(ctx
, EXCP_OVERFLOW
);
2961 opn
= (opc
== OPC_SUB_CP2
? "sub" : "dsub");
2966 tcg_gen_ext32u_i64(t0
, t0
);
2967 tcg_gen_ext32u_i64(t1
, t1
);
2968 tcg_gen_mul_i64(t0
, t0
, t1
);
2978 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
2979 FD field is the CC field? */
2982 generate_exception(ctx
, EXCP_RI
);
2989 gen_store_fpr64(ctx
, t0
, rd
);
2991 (void)opn
; /* avoid a compiler warning */
2992 MIPS_DEBUG("%s %s, %s, %s", opn
,
2993 fregnames
[rd
], fregnames
[rs
], fregnames
[rt
]);
2994 tcg_temp_free_i64(t0
);
2995 tcg_temp_free_i64(t1
);
2999 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
3000 int rs
, int rt
, int16_t imm
)
3003 TCGv t0
= tcg_temp_new();
3004 TCGv t1
= tcg_temp_new();
3007 /* Load needed operands */
3015 /* Compare two registers */
3017 gen_load_gpr(t0
, rs
);
3018 gen_load_gpr(t1
, rt
);
3028 /* Compare register to immediate */
3029 if (rs
!= 0 || imm
!= 0) {
3030 gen_load_gpr(t0
, rs
);
3031 tcg_gen_movi_tl(t1
, (int32_t)imm
);
3038 case OPC_TEQ
: /* rs == rs */
3039 case OPC_TEQI
: /* r0 == 0 */
3040 case OPC_TGE
: /* rs >= rs */
3041 case OPC_TGEI
: /* r0 >= 0 */
3042 case OPC_TGEU
: /* rs >= rs unsigned */
3043 case OPC_TGEIU
: /* r0 >= 0 unsigned */
3045 generate_exception(ctx
, EXCP_TRAP
);
3047 case OPC_TLT
: /* rs < rs */
3048 case OPC_TLTI
: /* r0 < 0 */
3049 case OPC_TLTU
: /* rs < rs unsigned */
3050 case OPC_TLTIU
: /* r0 < 0 unsigned */
3051 case OPC_TNE
: /* rs != rs */
3052 case OPC_TNEI
: /* r0 != 0 */
3053 /* Never trap: treat as NOP. */
3057 int l1
= gen_new_label();
3062 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
3066 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
3070 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
3074 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
3078 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
3082 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
3085 generate_exception(ctx
, EXCP_TRAP
);
3092 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
3094 TranslationBlock
*tb
;
3096 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
3097 likely(!ctx
->singlestep_enabled
)) {
3100 tcg_gen_exit_tb((tcg_target_long
)tb
+ n
);
3103 if (ctx
->singlestep_enabled
) {
3104 save_cpu_state(ctx
, 0);
3105 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
3111 /* Branches (before delay slot) */
3112 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
3114 int rs
, int rt
, int32_t offset
)
3116 target_ulong btgt
= -1;
3118 int bcond_compute
= 0;
3119 TCGv t0
= tcg_temp_new();
3120 TCGv t1
= tcg_temp_new();
3122 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3123 #ifdef MIPS_DEBUG_DISAS
3124 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx
"\n", ctx
->pc
);
3126 generate_exception(ctx
, EXCP_RI
);
3130 /* Load needed operands */
3136 /* Compare two registers */
3138 gen_load_gpr(t0
, rs
);
3139 gen_load_gpr(t1
, rt
);
3142 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3158 /* Compare to zero */
3160 gen_load_gpr(t0
, rs
);
3163 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
3170 /* Jump to immediate */
3171 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
3177 /* Jump to register */
3178 if (offset
!= 0 && offset
!= 16) {
3179 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
3180 others are reserved. */
3181 MIPS_INVAL("jump hint");
3182 generate_exception(ctx
, EXCP_RI
);
3185 gen_load_gpr(btarget
, rs
);
3188 MIPS_INVAL("branch/jump");
3189 generate_exception(ctx
, EXCP_RI
);
3192 if (bcond_compute
== 0) {
3193 /* No condition to be computed */
3195 case OPC_BEQ
: /* rx == rx */
3196 case OPC_BEQL
: /* rx == rx likely */
3197 case OPC_BGEZ
: /* 0 >= 0 */
3198 case OPC_BGEZL
: /* 0 >= 0 likely */
3199 case OPC_BLEZ
: /* 0 <= 0 */
3200 case OPC_BLEZL
: /* 0 <= 0 likely */
3202 ctx
->hflags
|= MIPS_HFLAG_B
;
3203 MIPS_DEBUG("balways");
3206 case OPC_BGEZAL
: /* 0 >= 0 */
3207 case OPC_BGEZALL
: /* 0 >= 0 likely */
3208 ctx
->hflags
|= (opc
== OPC_BGEZALS
3210 : MIPS_HFLAG_BDS32
);
3211 /* Always take and link */
3213 ctx
->hflags
|= MIPS_HFLAG_B
;
3214 MIPS_DEBUG("balways and link");
3216 case OPC_BNE
: /* rx != rx */
3217 case OPC_BGTZ
: /* 0 > 0 */
3218 case OPC_BLTZ
: /* 0 < 0 */
3220 MIPS_DEBUG("bnever (NOP)");
3223 case OPC_BLTZAL
: /* 0 < 0 */
3224 ctx
->hflags
|= (opc
== OPC_BLTZALS
3226 : MIPS_HFLAG_BDS32
);
3227 /* Handle as an unconditional branch to get correct delay
3230 btgt
= ctx
->pc
+ (opc
== OPC_BLTZALS
? 6 : 8);
3231 ctx
->hflags
|= MIPS_HFLAG_B
;
3232 MIPS_DEBUG("bnever and link");
3234 case OPC_BLTZALL
: /* 0 < 0 likely */
3235 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
3236 /* Skip the instruction in the delay slot */
3237 MIPS_DEBUG("bnever, link and skip");
3240 case OPC_BNEL
: /* rx != rx likely */
3241 case OPC_BGTZL
: /* 0 > 0 likely */
3242 case OPC_BLTZL
: /* 0 < 0 likely */
3243 /* Skip the instruction in the delay slot */
3244 MIPS_DEBUG("bnever and skip");
3248 ctx
->hflags
|= MIPS_HFLAG_B
;
3249 MIPS_DEBUG("j " TARGET_FMT_lx
, btgt
);
3253 ctx
->hflags
|= MIPS_HFLAG_BX
;
3258 ctx
->hflags
|= MIPS_HFLAG_B
;
3259 ctx
->hflags
|= ((opc
== OPC_JALS
|| opc
== OPC_JALXS
)
3261 : MIPS_HFLAG_BDS32
);
3262 MIPS_DEBUG("jal " TARGET_FMT_lx
, btgt
);
3265 ctx
->hflags
|= MIPS_HFLAG_BR
;
3266 if (insn_bytes
== 4)
3267 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
3268 MIPS_DEBUG("jr %s", regnames
[rs
]);
3274 ctx
->hflags
|= MIPS_HFLAG_BR
;
3275 ctx
->hflags
|= (opc
== OPC_JALRS
3277 : MIPS_HFLAG_BDS32
);
3278 MIPS_DEBUG("jalr %s, %s", regnames
[rt
], regnames
[rs
]);
3281 MIPS_INVAL("branch/jump");
3282 generate_exception(ctx
, EXCP_RI
);
3288 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
3289 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx
,
3290 regnames
[rs
], regnames
[rt
], btgt
);
3293 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
3294 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx
,
3295 regnames
[rs
], regnames
[rt
], btgt
);
3298 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
3299 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx
,
3300 regnames
[rs
], regnames
[rt
], btgt
);
3303 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
3304 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx
,
3305 regnames
[rs
], regnames
[rt
], btgt
);
3308 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3309 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3312 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3313 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3317 ctx
->hflags
|= (opc
== OPC_BGEZALS
3319 : MIPS_HFLAG_BDS32
);
3320 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3321 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3325 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
3327 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3330 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
3331 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3334 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
3335 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3338 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
3339 MIPS_DEBUG("blez %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3342 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
3343 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3346 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3347 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3350 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3351 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3355 ctx
->hflags
|= (opc
== OPC_BLTZALS
3357 : MIPS_HFLAG_BDS32
);
3358 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3360 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3362 ctx
->hflags
|= MIPS_HFLAG_BC
;
3365 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
3367 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx
, regnames
[rs
], btgt
);
3369 ctx
->hflags
|= MIPS_HFLAG_BL
;
3372 MIPS_INVAL("conditional branch/jump");
3373 generate_exception(ctx
, EXCP_RI
);
3377 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx
,
3378 blink
, ctx
->hflags
, btgt
);
3380 ctx
->btarget
= btgt
;
3382 int post_delay
= insn_bytes
;
3383 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
3385 if (opc
!= OPC_JALRC
)
3386 post_delay
+= ((ctx
->hflags
& MIPS_HFLAG_BDS16
) ? 2 : 4);
3388 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
3392 if (insn_bytes
== 2)
3393 ctx
->hflags
|= MIPS_HFLAG_B16
;
3398 /* special3 bitfield operations */
3399 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
3400 int rs
, int lsb
, int msb
)
3402 TCGv t0
= tcg_temp_new();
3403 TCGv t1
= tcg_temp_new();
3406 gen_load_gpr(t1
, rs
);
3411 tcg_gen_shri_tl(t0
, t1
, lsb
);
3413 tcg_gen_andi_tl(t0
, t0
, (1 << (msb
+ 1)) - 1);
3415 tcg_gen_ext32s_tl(t0
, t0
);
3418 #if defined(TARGET_MIPS64)
3420 tcg_gen_shri_tl(t0
, t1
, lsb
);
3422 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1 + 32)) - 1);
3426 tcg_gen_shri_tl(t0
, t1
, lsb
+ 32);
3427 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
3430 tcg_gen_shri_tl(t0
, t1
, lsb
);
3431 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
3437 mask
= ((msb
- lsb
+ 1 < 32) ? ((1 << (msb
- lsb
+ 1)) - 1) : ~0) << lsb
;
3438 gen_load_gpr(t0
, rt
);
3439 tcg_gen_andi_tl(t0
, t0
, ~mask
);
3440 tcg_gen_shli_tl(t1
, t1
, lsb
);
3441 tcg_gen_andi_tl(t1
, t1
, mask
);
3442 tcg_gen_or_tl(t0
, t0
, t1
);
3443 tcg_gen_ext32s_tl(t0
, t0
);
3445 #if defined(TARGET_MIPS64)
3449 mask
= ((msb
- lsb
+ 1 + 32 < 64) ? ((1ULL << (msb
- lsb
+ 1 + 32)) - 1) : ~0ULL) << lsb
;
3450 gen_load_gpr(t0
, rt
);
3451 tcg_gen_andi_tl(t0
, t0
, ~mask
);
3452 tcg_gen_shli_tl(t1
, t1
, lsb
);
3453 tcg_gen_andi_tl(t1
, t1
, mask
);
3454 tcg_gen_or_tl(t0
, t0
, t1
);
3459 mask
= ((1ULL << (msb
- lsb
+ 1)) - 1) << (lsb
+ 32);
3460 gen_load_gpr(t0
, rt
);
3461 tcg_gen_andi_tl(t0
, t0
, ~mask
);
3462 tcg_gen_shli_tl(t1
, t1
, lsb
+ 32);
3463 tcg_gen_andi_tl(t1
, t1
, mask
);
3464 tcg_gen_or_tl(t0
, t0
, t1
);
3469 gen_load_gpr(t0
, rt
);
3470 mask
= ((1ULL << (msb
- lsb
+ 1)) - 1) << lsb
;
3471 gen_load_gpr(t0
, rt
);
3472 tcg_gen_andi_tl(t0
, t0
, ~mask
);
3473 tcg_gen_shli_tl(t1
, t1
, lsb
);
3474 tcg_gen_andi_tl(t1
, t1
, mask
);
3475 tcg_gen_or_tl(t0
, t0
, t1
);
3480 MIPS_INVAL("bitops");
3481 generate_exception(ctx
, EXCP_RI
);
3486 gen_store_gpr(t0
, rt
);
3491 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
3496 /* If no destination, treat it as a NOP. */
3501 t0
= tcg_temp_new();
3502 gen_load_gpr(t0
, rt
);
3506 TCGv t1
= tcg_temp_new();
3508 tcg_gen_shri_tl(t1
, t0
, 8);
3509 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
3510 tcg_gen_shli_tl(t0
, t0
, 8);
3511 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
3512 tcg_gen_or_tl(t0
, t0
, t1
);
3514 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3518 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
3521 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
3523 #if defined(TARGET_MIPS64)
3526 TCGv t1
= tcg_temp_new();
3528 tcg_gen_shri_tl(t1
, t0
, 8);
3529 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
3530 tcg_gen_shli_tl(t0
, t0
, 8);
3531 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
3532 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
3538 TCGv t1
= tcg_temp_new();
3540 tcg_gen_shri_tl(t1
, t0
, 16);
3541 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
3542 tcg_gen_shli_tl(t0
, t0
, 16);
3543 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
3544 tcg_gen_or_tl(t0
, t0
, t1
);
3545 tcg_gen_shri_tl(t1
, t0
, 32);
3546 tcg_gen_shli_tl(t0
, t0
, 32);
3547 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
3553 MIPS_INVAL("bsfhl");
3554 generate_exception(ctx
, EXCP_RI
);
3561 #ifndef CONFIG_USER_ONLY
3562 /* CP0 (MMU and control) */
3563 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
3565 TCGv_i32 t0
= tcg_temp_new_i32();
3567 tcg_gen_ld_i32(t0
, cpu_env
, off
);
3568 tcg_gen_ext_i32_tl(arg
, t0
);
3569 tcg_temp_free_i32(t0
);
3572 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
3574 tcg_gen_ld_tl(arg
, cpu_env
, off
);
3575 tcg_gen_ext32s_tl(arg
, arg
);
3578 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
3580 TCGv_i32 t0
= tcg_temp_new_i32();
3582 tcg_gen_trunc_tl_i32(t0
, arg
);
3583 tcg_gen_st_i32(t0
, cpu_env
, off
);
3584 tcg_temp_free_i32(t0
);
3587 static inline void gen_mtc0_store64 (TCGv arg
, target_ulong off
)
3589 tcg_gen_ext32s_tl(arg
, arg
);
3590 tcg_gen_st_tl(arg
, cpu_env
, off
);
3593 static void gen_mfc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
3595 const char *rn
= "invalid";
3598 check_insn(env
, ctx
, ISA_MIPS32
);
3604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
3608 check_insn(env
, ctx
, ASE_MT
);
3609 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
3613 check_insn(env
, ctx
, ASE_MT
);
3614 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
3618 check_insn(env
, ctx
, ASE_MT
);
3619 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
3629 gen_helper_mfc0_random(arg
, cpu_env
);
3633 check_insn(env
, ctx
, ASE_MT
);
3634 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
3638 check_insn(env
, ctx
, ASE_MT
);
3639 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
3643 check_insn(env
, ctx
, ASE_MT
);
3644 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
3648 check_insn(env
, ctx
, ASE_MT
);
3649 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
3653 check_insn(env
, ctx
, ASE_MT
);
3654 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
3658 check_insn(env
, ctx
, ASE_MT
);
3659 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
3660 rn
= "VPEScheFBack";
3663 check_insn(env
, ctx
, ASE_MT
);
3664 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
3674 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
3675 tcg_gen_ext32s_tl(arg
, arg
);
3679 check_insn(env
, ctx
, ASE_MT
);
3680 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
3684 check_insn(env
, ctx
, ASE_MT
);
3685 gen_helper_mfc0_tcbind(arg
, cpu_env
);
3689 check_insn(env
, ctx
, ASE_MT
);
3690 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
3694 check_insn(env
, ctx
, ASE_MT
);
3695 gen_helper_mfc0_tchalt(arg
, cpu_env
);
3699 check_insn(env
, ctx
, ASE_MT
);
3700 gen_helper_mfc0_tccontext(arg
, cpu_env
);
3704 check_insn(env
, ctx
, ASE_MT
);
3705 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
3709 check_insn(env
, ctx
, ASE_MT
);
3710 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
3720 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
3721 tcg_gen_ext32s_tl(arg
, arg
);
3731 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
3732 tcg_gen_ext32s_tl(arg
, arg
);
3736 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
3737 rn
= "ContextConfig";
3746 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
3750 check_insn(env
, ctx
, ISA_MIPS32R2
);
3751 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
3761 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
3765 check_insn(env
, ctx
, ISA_MIPS32R2
);
3766 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
3770 check_insn(env
, ctx
, ISA_MIPS32R2
);
3771 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
3775 check_insn(env
, ctx
, ISA_MIPS32R2
);
3776 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
3780 check_insn(env
, ctx
, ISA_MIPS32R2
);
3781 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
3785 check_insn(env
, ctx
, ISA_MIPS32R2
);
3786 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
3796 check_insn(env
, ctx
, ISA_MIPS32R2
);
3797 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
3807 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
3808 tcg_gen_ext32s_tl(arg
, arg
);
3818 /* Mark as an IO operation because we read the time. */
3821 gen_helper_mfc0_count(arg
, cpu_env
);
3825 /* Break the TB to be able to take timer interrupts immediately
3826 after reading count. */
3827 ctx
->bstate
= BS_STOP
;
3830 /* 6,7 are implementation dependent */
3838 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
3839 tcg_gen_ext32s_tl(arg
, arg
);
3849 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
3852 /* 6,7 are implementation dependent */
3860 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
3864 check_insn(env
, ctx
, ISA_MIPS32R2
);
3865 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
3869 check_insn(env
, ctx
, ISA_MIPS32R2
);
3870 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
3874 check_insn(env
, ctx
, ISA_MIPS32R2
);
3875 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
3885 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
3895 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
3896 tcg_gen_ext32s_tl(arg
, arg
);
3906 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
3910 check_insn(env
, ctx
, ISA_MIPS32R2
);
3911 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
3921 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
3925 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
3929 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
3933 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
3936 /* 4,5 are reserved */
3937 /* 6,7 are implementation dependent */
3939 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
3943 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
3953 gen_helper_mfc0_lladdr(arg
, cpu_env
);
3963 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
3973 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
3983 #if defined(TARGET_MIPS64)
3984 check_insn(env
, ctx
, ISA_MIPS3
);
3985 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
3986 tcg_gen_ext32s_tl(arg
, arg
);
3995 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3998 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
4006 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4007 rn
= "'Diagnostic"; /* implementation dependent */
4012 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
4016 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
4017 rn
= "TraceControl";
4020 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
4021 rn
= "TraceControl2";
4024 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
4025 rn
= "UserTraceData";
4028 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
4039 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
4040 tcg_gen_ext32s_tl(arg
, arg
);
4050 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
4051 rn
= "Performance0";
4054 // gen_helper_mfc0_performance1(arg);
4055 rn
= "Performance1";
4058 // gen_helper_mfc0_performance2(arg);
4059 rn
= "Performance2";
4062 // gen_helper_mfc0_performance3(arg);
4063 rn
= "Performance3";
4066 // gen_helper_mfc0_performance4(arg);
4067 rn
= "Performance4";
4070 // gen_helper_mfc0_performance5(arg);
4071 rn
= "Performance5";
4074 // gen_helper_mfc0_performance6(arg);
4075 rn
= "Performance6";
4078 // gen_helper_mfc0_performance7(arg);
4079 rn
= "Performance7";
4086 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4092 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4105 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4112 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
4125 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
4132 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
4142 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4143 tcg_gen_ext32s_tl(arg
, arg
);
4154 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4164 (void)rn
; /* avoid a compiler warning */
4165 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4169 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4170 generate_exception(ctx
, EXCP_RI
);
4173 static void gen_mtc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4175 const char *rn
= "invalid";
4178 check_insn(env
, ctx
, ISA_MIPS32
);
4187 gen_helper_mtc0_index(cpu_env
, arg
);
4191 check_insn(env
, ctx
, ASE_MT
);
4192 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
4196 check_insn(env
, ctx
, ASE_MT
);
4201 check_insn(env
, ctx
, ASE_MT
);
4216 check_insn(env
, ctx
, ASE_MT
);
4217 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
4221 check_insn(env
, ctx
, ASE_MT
);
4222 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
4226 check_insn(env
, ctx
, ASE_MT
);
4227 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
4231 check_insn(env
, ctx
, ASE_MT
);
4232 gen_helper_mtc0_yqmask(cpu_env
, arg
);
4236 check_insn(env
, ctx
, ASE_MT
);
4237 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4241 check_insn(env
, ctx
, ASE_MT
);
4242 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4243 rn
= "VPEScheFBack";
4246 check_insn(env
, ctx
, ASE_MT
);
4247 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
4257 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
4261 check_insn(env
, ctx
, ASE_MT
);
4262 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
4266 check_insn(env
, ctx
, ASE_MT
);
4267 gen_helper_mtc0_tcbind(cpu_env
, arg
);
4271 check_insn(env
, ctx
, ASE_MT
);
4272 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
4276 check_insn(env
, ctx
, ASE_MT
);
4277 gen_helper_mtc0_tchalt(cpu_env
, arg
);
4281 check_insn(env
, ctx
, ASE_MT
);
4282 gen_helper_mtc0_tccontext(cpu_env
, arg
);
4286 check_insn(env
, ctx
, ASE_MT
);
4287 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
4291 check_insn(env
, ctx
, ASE_MT
);
4292 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
4302 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
4312 gen_helper_mtc0_context(cpu_env
, arg
);
4316 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
4317 rn
= "ContextConfig";
4326 gen_helper_mtc0_pagemask(cpu_env
, arg
);
4330 check_insn(env
, ctx
, ISA_MIPS32R2
);
4331 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
4341 gen_helper_mtc0_wired(cpu_env
, arg
);
4345 check_insn(env
, ctx
, ISA_MIPS32R2
);
4346 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
4350 check_insn(env
, ctx
, ISA_MIPS32R2
);
4351 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
4355 check_insn(env
, ctx
, ISA_MIPS32R2
);
4356 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
4360 check_insn(env
, ctx
, ISA_MIPS32R2
);
4361 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
4365 check_insn(env
, ctx
, ISA_MIPS32R2
);
4366 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
4376 check_insn(env
, ctx
, ISA_MIPS32R2
);
4377 gen_helper_mtc0_hwrena(cpu_env
, arg
);
4391 gen_helper_mtc0_count(cpu_env
, arg
);
4394 /* 6,7 are implementation dependent */
4402 gen_helper_mtc0_entryhi(cpu_env
, arg
);
4412 gen_helper_mtc0_compare(cpu_env
, arg
);
4415 /* 6,7 are implementation dependent */
4423 save_cpu_state(ctx
, 1);
4424 gen_helper_mtc0_status(cpu_env
, arg
);
4425 /* BS_STOP isn't good enough here, hflags may have changed. */
4426 gen_save_pc(ctx
->pc
+ 4);
4427 ctx
->bstate
= BS_EXCP
;
4431 check_insn(env
, ctx
, ISA_MIPS32R2
);
4432 gen_helper_mtc0_intctl(cpu_env
, arg
);
4433 /* Stop translation as we may have switched the execution mode */
4434 ctx
->bstate
= BS_STOP
;
4438 check_insn(env
, ctx
, ISA_MIPS32R2
);
4439 gen_helper_mtc0_srsctl(cpu_env
, arg
);
4440 /* Stop translation as we may have switched the execution mode */
4441 ctx
->bstate
= BS_STOP
;
4445 check_insn(env
, ctx
, ISA_MIPS32R2
);
4446 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
4447 /* Stop translation as we may have switched the execution mode */
4448 ctx
->bstate
= BS_STOP
;
4458 save_cpu_state(ctx
, 1);
4459 gen_helper_mtc0_cause(cpu_env
, arg
);
4469 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_EPC
));
4483 check_insn(env
, ctx
, ISA_MIPS32R2
);
4484 gen_helper_mtc0_ebase(cpu_env
, arg
);
4494 gen_helper_mtc0_config0(cpu_env
, arg
);
4496 /* Stop translation as we may have switched the execution mode */
4497 ctx
->bstate
= BS_STOP
;
4500 /* ignored, read only */
4504 gen_helper_mtc0_config2(cpu_env
, arg
);
4506 /* Stop translation as we may have switched the execution mode */
4507 ctx
->bstate
= BS_STOP
;
4510 /* ignored, read only */
4513 /* 4,5 are reserved */
4514 /* 6,7 are implementation dependent */
4524 rn
= "Invalid config selector";
4531 gen_helper_mtc0_lladdr(cpu_env
, arg
);
4541 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
4551 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
4561 #if defined(TARGET_MIPS64)
4562 check_insn(env
, ctx
, ISA_MIPS3
);
4563 gen_helper_mtc0_xcontext(cpu_env
, arg
);
4572 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4575 gen_helper_mtc0_framemask(cpu_env
, arg
);
4584 rn
= "Diagnostic"; /* implementation dependent */
4589 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
4590 /* BS_STOP isn't good enough here, hflags may have changed. */
4591 gen_save_pc(ctx
->pc
+ 4);
4592 ctx
->bstate
= BS_EXCP
;
4596 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
4597 rn
= "TraceControl";
4598 /* Stop translation as we may have switched the execution mode */
4599 ctx
->bstate
= BS_STOP
;
4602 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
4603 rn
= "TraceControl2";
4604 /* Stop translation as we may have switched the execution mode */
4605 ctx
->bstate
= BS_STOP
;
4608 /* Stop translation as we may have switched the execution mode */
4609 ctx
->bstate
= BS_STOP
;
4610 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
4611 rn
= "UserTraceData";
4612 /* Stop translation as we may have switched the execution mode */
4613 ctx
->bstate
= BS_STOP
;
4616 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
4617 /* Stop translation as we may have switched the execution mode */
4618 ctx
->bstate
= BS_STOP
;
4629 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_DEPC
));
4639 gen_helper_mtc0_performance0(cpu_env
, arg
);
4640 rn
= "Performance0";
4643 // gen_helper_mtc0_performance1(arg);
4644 rn
= "Performance1";
4647 // gen_helper_mtc0_performance2(arg);
4648 rn
= "Performance2";
4651 // gen_helper_mtc0_performance3(arg);
4652 rn
= "Performance3";
4655 // gen_helper_mtc0_performance4(arg);
4656 rn
= "Performance4";
4659 // gen_helper_mtc0_performance5(arg);
4660 rn
= "Performance5";
4663 // gen_helper_mtc0_performance6(arg);
4664 rn
= "Performance6";
4667 // gen_helper_mtc0_performance7(arg);
4668 rn
= "Performance7";
4694 gen_helper_mtc0_taglo(cpu_env
, arg
);
4701 gen_helper_mtc0_datalo(cpu_env
, arg
);
4714 gen_helper_mtc0_taghi(cpu_env
, arg
);
4721 gen_helper_mtc0_datahi(cpu_env
, arg
);
4732 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4743 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4749 /* Stop translation as we may have switched the execution mode */
4750 ctx
->bstate
= BS_STOP
;
4755 (void)rn
; /* avoid a compiler warning */
4756 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4757 /* For simplicity assume that all writes can cause interrupts. */
4760 ctx
->bstate
= BS_STOP
;
4765 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4766 generate_exception(ctx
, EXCP_RI
);
4769 #if defined(TARGET_MIPS64)
4770 static void gen_dmfc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4772 const char *rn
= "invalid";
4775 check_insn(env
, ctx
, ISA_MIPS64
);
4781 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4785 check_insn(env
, ctx
, ASE_MT
);
4786 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4790 check_insn(env
, ctx
, ASE_MT
);
4791 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4795 check_insn(env
, ctx
, ASE_MT
);
4796 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4806 gen_helper_mfc0_random(arg
, cpu_env
);
4810 check_insn(env
, ctx
, ASE_MT
);
4811 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4815 check_insn(env
, ctx
, ASE_MT
);
4816 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4820 check_insn(env
, ctx
, ASE_MT
);
4821 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4825 check_insn(env
, ctx
, ASE_MT
);
4826 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
4830 check_insn(env
, ctx
, ASE_MT
);
4831 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4835 check_insn(env
, ctx
, ASE_MT
);
4836 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4837 rn
= "VPEScheFBack";
4840 check_insn(env
, ctx
, ASE_MT
);
4841 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4851 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4855 check_insn(env
, ctx
, ASE_MT
);
4856 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
4860 check_insn(env
, ctx
, ASE_MT
);
4861 gen_helper_mfc0_tcbind(arg
, cpu_env
);
4865 check_insn(env
, ctx
, ASE_MT
);
4866 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
4870 check_insn(env
, ctx
, ASE_MT
);
4871 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
4875 check_insn(env
, ctx
, ASE_MT
);
4876 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
4880 check_insn(env
, ctx
, ASE_MT
);
4881 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
4885 check_insn(env
, ctx
, ASE_MT
);
4886 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
4896 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4906 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
4910 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
4911 rn
= "ContextConfig";
4920 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
4924 check_insn(env
, ctx
, ISA_MIPS32R2
);
4925 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
4935 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
4939 check_insn(env
, ctx
, ISA_MIPS32R2
);
4940 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
4944 check_insn(env
, ctx
, ISA_MIPS32R2
);
4945 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
4949 check_insn(env
, ctx
, ISA_MIPS32R2
);
4950 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
4954 check_insn(env
, ctx
, ISA_MIPS32R2
);
4955 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
4959 check_insn(env
, ctx
, ISA_MIPS32R2
);
4960 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
4970 check_insn(env
, ctx
, ISA_MIPS32R2
);
4971 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
4981 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
4991 /* Mark as an IO operation because we read the time. */
4994 gen_helper_mfc0_count(arg
, cpu_env
);
4998 /* Break the TB to be able to take timer interrupts immediately
4999 after reading count. */
5000 ctx
->bstate
= BS_STOP
;
5003 /* 6,7 are implementation dependent */
5011 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5021 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5024 /* 6,7 are implementation dependent */
5032 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5036 check_insn(env
, ctx
, ISA_MIPS32R2
);
5037 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5041 check_insn(env
, ctx
, ISA_MIPS32R2
);
5042 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5046 check_insn(env
, ctx
, ISA_MIPS32R2
);
5047 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5057 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5067 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5077 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5081 check_insn(env
, ctx
, ISA_MIPS32R2
);
5082 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5092 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5096 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5100 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5104 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5107 /* 6,7 are implementation dependent */
5109 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5113 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5123 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
5133 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
5143 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5153 check_insn(env
, ctx
, ISA_MIPS3
);
5154 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5162 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5165 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5173 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5174 rn
= "'Diagnostic"; /* implementation dependent */
5179 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5183 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
5184 rn
= "TraceControl";
5187 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
5188 rn
= "TraceControl2";
5191 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
5192 rn
= "UserTraceData";
5195 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
5206 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5216 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5217 rn
= "Performance0";
5220 // gen_helper_dmfc0_performance1(arg);
5221 rn
= "Performance1";
5224 // gen_helper_dmfc0_performance2(arg);
5225 rn
= "Performance2";
5228 // gen_helper_dmfc0_performance3(arg);
5229 rn
= "Performance3";
5232 // gen_helper_dmfc0_performance4(arg);
5233 rn
= "Performance4";
5236 // gen_helper_dmfc0_performance5(arg);
5237 rn
= "Performance5";
5240 // gen_helper_dmfc0_performance6(arg);
5241 rn
= "Performance6";
5244 // gen_helper_dmfc0_performance7(arg);
5245 rn
= "Performance7";
5252 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5259 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5272 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5279 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5292 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5309 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5320 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5330 (void)rn
; /* avoid a compiler warning */
5331 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5335 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5336 generate_exception(ctx
, EXCP_RI
);
5339 static void gen_dmtc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5341 const char *rn
= "invalid";
5344 check_insn(env
, ctx
, ISA_MIPS64
);
5353 gen_helper_mtc0_index(cpu_env
, arg
);
5357 check_insn(env
, ctx
, ASE_MT
);
5358 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5362 check_insn(env
, ctx
, ASE_MT
);
5367 check_insn(env
, ctx
, ASE_MT
);
5382 check_insn(env
, ctx
, ASE_MT
);
5383 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5387 check_insn(env
, ctx
, ASE_MT
);
5388 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5392 check_insn(env
, ctx
, ASE_MT
);
5393 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5397 check_insn(env
, ctx
, ASE_MT
);
5398 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5402 check_insn(env
, ctx
, ASE_MT
);
5403 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5407 check_insn(env
, ctx
, ASE_MT
);
5408 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5409 rn
= "VPEScheFBack";
5412 check_insn(env
, ctx
, ASE_MT
);
5413 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5423 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5427 check_insn(env
, ctx
, ASE_MT
);
5428 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5432 check_insn(env
, ctx
, ASE_MT
);
5433 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5437 check_insn(env
, ctx
, ASE_MT
);
5438 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5442 check_insn(env
, ctx
, ASE_MT
);
5443 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5447 check_insn(env
, ctx
, ASE_MT
);
5448 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5452 check_insn(env
, ctx
, ASE_MT
);
5453 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5457 check_insn(env
, ctx
, ASE_MT
);
5458 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5468 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5478 gen_helper_mtc0_context(cpu_env
, arg
);
5482 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5483 rn
= "ContextConfig";
5492 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5496 check_insn(env
, ctx
, ISA_MIPS32R2
);
5497 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5507 gen_helper_mtc0_wired(cpu_env
, arg
);
5511 check_insn(env
, ctx
, ISA_MIPS32R2
);
5512 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5516 check_insn(env
, ctx
, ISA_MIPS32R2
);
5517 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5521 check_insn(env
, ctx
, ISA_MIPS32R2
);
5522 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5526 check_insn(env
, ctx
, ISA_MIPS32R2
);
5527 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5531 check_insn(env
, ctx
, ISA_MIPS32R2
);
5532 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5542 check_insn(env
, ctx
, ISA_MIPS32R2
);
5543 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5557 gen_helper_mtc0_count(cpu_env
, arg
);
5560 /* 6,7 are implementation dependent */
5564 /* Stop translation as we may have switched the execution mode */
5565 ctx
->bstate
= BS_STOP
;
5570 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5580 gen_helper_mtc0_compare(cpu_env
, arg
);
5583 /* 6,7 are implementation dependent */
5587 /* Stop translation as we may have switched the execution mode */
5588 ctx
->bstate
= BS_STOP
;
5593 save_cpu_state(ctx
, 1);
5594 gen_helper_mtc0_status(cpu_env
, arg
);
5595 /* BS_STOP isn't good enough here, hflags may have changed. */
5596 gen_save_pc(ctx
->pc
+ 4);
5597 ctx
->bstate
= BS_EXCP
;
5601 check_insn(env
, ctx
, ISA_MIPS32R2
);
5602 gen_helper_mtc0_intctl(cpu_env
, arg
);
5603 /* Stop translation as we may have switched the execution mode */
5604 ctx
->bstate
= BS_STOP
;
5608 check_insn(env
, ctx
, ISA_MIPS32R2
);
5609 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5610 /* Stop translation as we may have switched the execution mode */
5611 ctx
->bstate
= BS_STOP
;
5615 check_insn(env
, ctx
, ISA_MIPS32R2
);
5616 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5617 /* Stop translation as we may have switched the execution mode */
5618 ctx
->bstate
= BS_STOP
;
5628 save_cpu_state(ctx
, 1);
5629 /* Mark as an IO operation because we may trigger a software
5634 gen_helper_mtc0_cause(cpu_env
, arg
);
5638 /* Stop translation as we may have triggered an intetrupt */
5639 ctx
->bstate
= BS_STOP
;
5649 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5663 check_insn(env
, ctx
, ISA_MIPS32R2
);
5664 gen_helper_mtc0_ebase(cpu_env
, arg
);
5674 gen_helper_mtc0_config0(cpu_env
, arg
);
5676 /* Stop translation as we may have switched the execution mode */
5677 ctx
->bstate
= BS_STOP
;
5680 /* ignored, read only */
5684 gen_helper_mtc0_config2(cpu_env
, arg
);
5686 /* Stop translation as we may have switched the execution mode */
5687 ctx
->bstate
= BS_STOP
;
5693 /* 6,7 are implementation dependent */
5695 rn
= "Invalid config selector";
5702 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5712 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5722 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5732 check_insn(env
, ctx
, ISA_MIPS3
);
5733 gen_helper_mtc0_xcontext(cpu_env
, arg
);
5741 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5744 gen_helper_mtc0_framemask(cpu_env
, arg
);
5753 rn
= "Diagnostic"; /* implementation dependent */
5758 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
5759 /* BS_STOP isn't good enough here, hflags may have changed. */
5760 gen_save_pc(ctx
->pc
+ 4);
5761 ctx
->bstate
= BS_EXCP
;
5765 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
5766 /* Stop translation as we may have switched the execution mode */
5767 ctx
->bstate
= BS_STOP
;
5768 rn
= "TraceControl";
5771 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
5772 /* Stop translation as we may have switched the execution mode */
5773 ctx
->bstate
= BS_STOP
;
5774 rn
= "TraceControl2";
5777 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
5778 /* Stop translation as we may have switched the execution mode */
5779 ctx
->bstate
= BS_STOP
;
5780 rn
= "UserTraceData";
5783 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
5784 /* Stop translation as we may have switched the execution mode */
5785 ctx
->bstate
= BS_STOP
;
5796 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5806 gen_helper_mtc0_performance0(cpu_env
, arg
);
5807 rn
= "Performance0";
5810 // gen_helper_mtc0_performance1(cpu_env, arg);
5811 rn
= "Performance1";
5814 // gen_helper_mtc0_performance2(cpu_env, arg);
5815 rn
= "Performance2";
5818 // gen_helper_mtc0_performance3(cpu_env, arg);
5819 rn
= "Performance3";
5822 // gen_helper_mtc0_performance4(cpu_env, arg);
5823 rn
= "Performance4";
5826 // gen_helper_mtc0_performance5(cpu_env, arg);
5827 rn
= "Performance5";
5830 // gen_helper_mtc0_performance6(cpu_env, arg);
5831 rn
= "Performance6";
5834 // gen_helper_mtc0_performance7(cpu_env, arg);
5835 rn
= "Performance7";
5861 gen_helper_mtc0_taglo(cpu_env
, arg
);
5868 gen_helper_mtc0_datalo(cpu_env
, arg
);
5881 gen_helper_mtc0_taghi(cpu_env
, arg
);
5888 gen_helper_mtc0_datahi(cpu_env
, arg
);
5899 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5910 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5916 /* Stop translation as we may have switched the execution mode */
5917 ctx
->bstate
= BS_STOP
;
5922 (void)rn
; /* avoid a compiler warning */
5923 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5924 /* For simplicity assume that all writes can cause interrupts. */
5927 ctx
->bstate
= BS_STOP
;
5932 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5933 generate_exception(ctx
, EXCP_RI
);
5935 #endif /* TARGET_MIPS64 */
5937 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
5938 int u
, int sel
, int h
)
5940 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5941 TCGv t0
= tcg_temp_local_new();
5943 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5944 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5945 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5946 tcg_gen_movi_tl(t0
, -1);
5947 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5948 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5949 tcg_gen_movi_tl(t0
, -1);
5955 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
5958 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
5968 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
5971 gen_helper_mftc0_tcbind(t0
, cpu_env
);
5974 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
5977 gen_helper_mftc0_tchalt(t0
, cpu_env
);
5980 gen_helper_mftc0_tccontext(t0
, cpu_env
);
5983 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
5986 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
5989 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5996 gen_helper_mftc0_entryhi(t0
, cpu_env
);
5999 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
6005 gen_helper_mftc0_status(t0
, cpu_env
);
6008 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
6014 gen_helper_mftc0_cause(t0
, cpu_env
);
6024 gen_helper_mftc0_epc(t0
, cpu_env
);
6034 gen_helper_mftc0_ebase(t0
, cpu_env
);
6044 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
6054 gen_helper_mftc0_debug(t0
, cpu_env
);
6057 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
6062 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
6064 } else switch (sel
) {
6065 /* GPR registers. */
6067 gen_helper_1e0i(mftgpr
, t0
, rt
);
6069 /* Auxiliary CPU registers */
6073 gen_helper_1e0i(mftlo
, t0
, 0);
6076 gen_helper_1e0i(mfthi
, t0
, 0);
6079 gen_helper_1e0i(mftacx
, t0
, 0);
6082 gen_helper_1e0i(mftlo
, t0
, 1);
6085 gen_helper_1e0i(mfthi
, t0
, 1);
6088 gen_helper_1e0i(mftacx
, t0
, 1);
6091 gen_helper_1e0i(mftlo
, t0
, 2);
6094 gen_helper_1e0i(mfthi
, t0
, 2);
6097 gen_helper_1e0i(mftacx
, t0
, 2);
6100 gen_helper_1e0i(mftlo
, t0
, 3);
6103 gen_helper_1e0i(mfthi
, t0
, 3);
6106 gen_helper_1e0i(mftacx
, t0
, 3);
6109 gen_helper_mftdsp(t0
, cpu_env
);
6115 /* Floating point (COP1). */
6117 /* XXX: For now we support only a single FPU context. */
6119 TCGv_i32 fp0
= tcg_temp_new_i32();
6121 gen_load_fpr32(fp0
, rt
);
6122 tcg_gen_ext_i32_tl(t0
, fp0
);
6123 tcg_temp_free_i32(fp0
);
6125 TCGv_i32 fp0
= tcg_temp_new_i32();
6127 gen_load_fpr32h(fp0
, rt
);
6128 tcg_gen_ext_i32_tl(t0
, fp0
);
6129 tcg_temp_free_i32(fp0
);
6133 /* XXX: For now we support only a single FPU context. */
6134 gen_helper_1e0i(cfc1
, t0
, rt
);
6136 /* COP2: Not implemented. */
6143 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
6144 gen_store_gpr(t0
, rd
);
6150 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
6151 generate_exception(ctx
, EXCP_RI
);
6154 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
6155 int u
, int sel
, int h
)
6157 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
6158 TCGv t0
= tcg_temp_local_new();
6160 gen_load_gpr(t0
, rt
);
6161 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
6162 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
6163 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
6165 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
6166 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
6173 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
6176 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
6186 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
6189 gen_helper_mttc0_tcbind(cpu_env
, t0
);
6192 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
6195 gen_helper_mttc0_tchalt(cpu_env
, t0
);
6198 gen_helper_mttc0_tccontext(cpu_env
, t0
);
6201 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
6204 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
6207 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6214 gen_helper_mttc0_entryhi(cpu_env
, t0
);
6217 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6223 gen_helper_mttc0_status(cpu_env
, t0
);
6226 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6232 gen_helper_mttc0_cause(cpu_env
, t0
);
6242 gen_helper_mttc0_ebase(cpu_env
, t0
);
6252 gen_helper_mttc0_debug(cpu_env
, t0
);
6255 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6260 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
6262 } else switch (sel
) {
6263 /* GPR registers. */
6265 gen_helper_0e1i(mttgpr
, t0
, rd
);
6267 /* Auxiliary CPU registers */
6271 gen_helper_0e1i(mttlo
, t0
, 0);
6274 gen_helper_0e1i(mtthi
, t0
, 0);
6277 gen_helper_0e1i(mttacx
, t0
, 0);
6280 gen_helper_0e1i(mttlo
, t0
, 1);
6283 gen_helper_0e1i(mtthi
, t0
, 1);
6286 gen_helper_0e1i(mttacx
, t0
, 1);
6289 gen_helper_0e1i(mttlo
, t0
, 2);
6292 gen_helper_0e1i(mtthi
, t0
, 2);
6295 gen_helper_0e1i(mttacx
, t0
, 2);
6298 gen_helper_0e1i(mttlo
, t0
, 3);
6301 gen_helper_0e1i(mtthi
, t0
, 3);
6304 gen_helper_0e1i(mttacx
, t0
, 3);
6307 gen_helper_mttdsp(cpu_env
, t0
);
6313 /* Floating point (COP1). */
6315 /* XXX: For now we support only a single FPU context. */
6317 TCGv_i32 fp0
= tcg_temp_new_i32();
6319 tcg_gen_trunc_tl_i32(fp0
, t0
);
6320 gen_store_fpr32(fp0
, rd
);
6321 tcg_temp_free_i32(fp0
);
6323 TCGv_i32 fp0
= tcg_temp_new_i32();
6325 tcg_gen_trunc_tl_i32(fp0
, t0
);
6326 gen_store_fpr32h(fp0
, rd
);
6327 tcg_temp_free_i32(fp0
);
6331 /* XXX: For now we support only a single FPU context. */
6332 gen_helper_0e1i(ctc1
, t0
, rd
);
6334 /* COP2: Not implemented. */
6341 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
6347 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
6348 generate_exception(ctx
, EXCP_RI
);
6351 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
6353 const char *opn
= "ldst";
6355 check_cp0_enabled(ctx
);
6362 gen_mfc0(env
, ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
6367 TCGv t0
= tcg_temp_new();
6369 gen_load_gpr(t0
, rt
);
6370 gen_mtc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
6375 #if defined(TARGET_MIPS64)
6377 check_insn(env
, ctx
, ISA_MIPS3
);
6382 gen_dmfc0(env
, ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
6386 check_insn(env
, ctx
, ISA_MIPS3
);
6388 TCGv t0
= tcg_temp_new();
6390 gen_load_gpr(t0
, rt
);
6391 gen_dmtc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
6398 check_insn(env
, ctx
, ASE_MT
);
6403 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
6404 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
6408 check_insn(env
, ctx
, ASE_MT
);
6409 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
6410 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
6415 if (!env
->tlb
->helper_tlbwi
)
6417 gen_helper_tlbwi(cpu_env
);
6421 if (!env
->tlb
->helper_tlbwr
)
6423 gen_helper_tlbwr(cpu_env
);
6427 if (!env
->tlb
->helper_tlbp
)
6429 gen_helper_tlbp(cpu_env
);
6433 if (!env
->tlb
->helper_tlbr
)
6435 gen_helper_tlbr(cpu_env
);
6439 check_insn(env
, ctx
, ISA_MIPS2
);
6440 gen_helper_eret(cpu_env
);
6441 ctx
->bstate
= BS_EXCP
;
6445 check_insn(env
, ctx
, ISA_MIPS32
);
6446 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
6448 generate_exception(ctx
, EXCP_RI
);
6450 gen_helper_deret(cpu_env
);
6451 ctx
->bstate
= BS_EXCP
;
6456 check_insn(env
, ctx
, ISA_MIPS3
| ISA_MIPS32
);
6457 /* If we get an exception, we want to restart at next instruction */
6459 save_cpu_state(ctx
, 1);
6461 gen_helper_wait(cpu_env
);
6462 ctx
->bstate
= BS_EXCP
;
6467 generate_exception(ctx
, EXCP_RI
);
6470 (void)opn
; /* avoid a compiler warning */
6471 MIPS_DEBUG("%s %s %d", opn
, regnames
[rt
], rd
);
6473 #endif /* !CONFIG_USER_ONLY */
6475 /* CP1 Branches (before delay slot) */
6476 static void gen_compute_branch1 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op
,
6477 int32_t cc
, int32_t offset
)
6479 target_ulong btarget
;
6480 const char *opn
= "cp1 cond branch";
6481 TCGv_i32 t0
= tcg_temp_new_i32();
6484 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
6486 btarget
= ctx
->pc
+ 4 + offset
;
6490 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6491 tcg_gen_not_i32(t0
, t0
);
6492 tcg_gen_andi_i32(t0
, t0
, 1);
6493 tcg_gen_extu_i32_tl(bcond
, t0
);
6497 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6498 tcg_gen_not_i32(t0
, t0
);
6499 tcg_gen_andi_i32(t0
, t0
, 1);
6500 tcg_gen_extu_i32_tl(bcond
, t0
);
6504 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6505 tcg_gen_andi_i32(t0
, t0
, 1);
6506 tcg_gen_extu_i32_tl(bcond
, t0
);
6510 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6511 tcg_gen_andi_i32(t0
, t0
, 1);
6512 tcg_gen_extu_i32_tl(bcond
, t0
);
6515 ctx
->hflags
|= MIPS_HFLAG_BL
;
6519 TCGv_i32 t1
= tcg_temp_new_i32();
6520 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6521 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6522 tcg_gen_nand_i32(t0
, t0
, t1
);
6523 tcg_temp_free_i32(t1
);
6524 tcg_gen_andi_i32(t0
, t0
, 1);
6525 tcg_gen_extu_i32_tl(bcond
, t0
);
6531 TCGv_i32 t1
= tcg_temp_new_i32();
6532 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6533 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6534 tcg_gen_or_i32(t0
, t0
, t1
);
6535 tcg_temp_free_i32(t1
);
6536 tcg_gen_andi_i32(t0
, t0
, 1);
6537 tcg_gen_extu_i32_tl(bcond
, t0
);
6543 TCGv_i32 t1
= tcg_temp_new_i32();
6544 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6545 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6546 tcg_gen_and_i32(t0
, t0
, t1
);
6547 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
6548 tcg_gen_and_i32(t0
, t0
, t1
);
6549 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
6550 tcg_gen_nand_i32(t0
, t0
, t1
);
6551 tcg_temp_free_i32(t1
);
6552 tcg_gen_andi_i32(t0
, t0
, 1);
6553 tcg_gen_extu_i32_tl(bcond
, t0
);
6559 TCGv_i32 t1
= tcg_temp_new_i32();
6560 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6561 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6562 tcg_gen_or_i32(t0
, t0
, t1
);
6563 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
6564 tcg_gen_or_i32(t0
, t0
, t1
);
6565 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
6566 tcg_gen_or_i32(t0
, t0
, t1
);
6567 tcg_temp_free_i32(t1
);
6568 tcg_gen_andi_i32(t0
, t0
, 1);
6569 tcg_gen_extu_i32_tl(bcond
, t0
);
6573 ctx
->hflags
|= MIPS_HFLAG_BC
;
6577 generate_exception (ctx
, EXCP_RI
);
6580 (void)opn
; /* avoid a compiler warning */
6581 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx
, opn
,
6582 ctx
->hflags
, btarget
);
6583 ctx
->btarget
= btarget
;
6586 tcg_temp_free_i32(t0
);
6589 /* Coprocessor 1 (FPU) */
6591 #define FOP(func, fmt) (((fmt) << 21) | (func))
6594 OPC_ADD_S
= FOP(0, FMT_S
),
6595 OPC_SUB_S
= FOP(1, FMT_S
),
6596 OPC_MUL_S
= FOP(2, FMT_S
),
6597 OPC_DIV_S
= FOP(3, FMT_S
),
6598 OPC_SQRT_S
= FOP(4, FMT_S
),
6599 OPC_ABS_S
= FOP(5, FMT_S
),
6600 OPC_MOV_S
= FOP(6, FMT_S
),
6601 OPC_NEG_S
= FOP(7, FMT_S
),
6602 OPC_ROUND_L_S
= FOP(8, FMT_S
),
6603 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
6604 OPC_CEIL_L_S
= FOP(10, FMT_S
),
6605 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
6606 OPC_ROUND_W_S
= FOP(12, FMT_S
),
6607 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
6608 OPC_CEIL_W_S
= FOP(14, FMT_S
),
6609 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
6610 OPC_MOVCF_S
= FOP(17, FMT_S
),
6611 OPC_MOVZ_S
= FOP(18, FMT_S
),
6612 OPC_MOVN_S
= FOP(19, FMT_S
),
6613 OPC_RECIP_S
= FOP(21, FMT_S
),
6614 OPC_RSQRT_S
= FOP(22, FMT_S
),
6615 OPC_RECIP2_S
= FOP(28, FMT_S
),
6616 OPC_RECIP1_S
= FOP(29, FMT_S
),
6617 OPC_RSQRT1_S
= FOP(30, FMT_S
),
6618 OPC_RSQRT2_S
= FOP(31, FMT_S
),
6619 OPC_CVT_D_S
= FOP(33, FMT_S
),
6620 OPC_CVT_W_S
= FOP(36, FMT_S
),
6621 OPC_CVT_L_S
= FOP(37, FMT_S
),
6622 OPC_CVT_PS_S
= FOP(38, FMT_S
),
6623 OPC_CMP_F_S
= FOP (48, FMT_S
),
6624 OPC_CMP_UN_S
= FOP (49, FMT_S
),
6625 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
6626 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
6627 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
6628 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
6629 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
6630 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
6631 OPC_CMP_SF_S
= FOP (56, FMT_S
),
6632 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
6633 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
6634 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
6635 OPC_CMP_LT_S
= FOP (60, FMT_S
),
6636 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
6637 OPC_CMP_LE_S
= FOP (62, FMT_S
),
6638 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
6640 OPC_ADD_D
= FOP(0, FMT_D
),
6641 OPC_SUB_D
= FOP(1, FMT_D
),
6642 OPC_MUL_D
= FOP(2, FMT_D
),
6643 OPC_DIV_D
= FOP(3, FMT_D
),
6644 OPC_SQRT_D
= FOP(4, FMT_D
),
6645 OPC_ABS_D
= FOP(5, FMT_D
),
6646 OPC_MOV_D
= FOP(6, FMT_D
),
6647 OPC_NEG_D
= FOP(7, FMT_D
),
6648 OPC_ROUND_L_D
= FOP(8, FMT_D
),
6649 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
6650 OPC_CEIL_L_D
= FOP(10, FMT_D
),
6651 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
6652 OPC_ROUND_W_D
= FOP(12, FMT_D
),
6653 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
6654 OPC_CEIL_W_D
= FOP(14, FMT_D
),
6655 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
6656 OPC_MOVCF_D
= FOP(17, FMT_D
),
6657 OPC_MOVZ_D
= FOP(18, FMT_D
),
6658 OPC_MOVN_D
= FOP(19, FMT_D
),
6659 OPC_RECIP_D
= FOP(21, FMT_D
),
6660 OPC_RSQRT_D
= FOP(22, FMT_D
),
6661 OPC_RECIP2_D
= FOP(28, FMT_D
),
6662 OPC_RECIP1_D
= FOP(29, FMT_D
),
6663 OPC_RSQRT1_D
= FOP(30, FMT_D
),
6664 OPC_RSQRT2_D
= FOP(31, FMT_D
),
6665 OPC_CVT_S_D
= FOP(32, FMT_D
),
6666 OPC_CVT_W_D
= FOP(36, FMT_D
),
6667 OPC_CVT_L_D
= FOP(37, FMT_D
),
6668 OPC_CMP_F_D
= FOP (48, FMT_D
),
6669 OPC_CMP_UN_D
= FOP (49, FMT_D
),
6670 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
6671 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
6672 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
6673 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
6674 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
6675 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
6676 OPC_CMP_SF_D
= FOP (56, FMT_D
),
6677 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
6678 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
6679 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
6680 OPC_CMP_LT_D
= FOP (60, FMT_D
),
6681 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
6682 OPC_CMP_LE_D
= FOP (62, FMT_D
),
6683 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
6685 OPC_CVT_S_W
= FOP(32, FMT_W
),
6686 OPC_CVT_D_W
= FOP(33, FMT_W
),
6687 OPC_CVT_S_L
= FOP(32, FMT_L
),
6688 OPC_CVT_D_L
= FOP(33, FMT_L
),
6689 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
6691 OPC_ADD_PS
= FOP(0, FMT_PS
),
6692 OPC_SUB_PS
= FOP(1, FMT_PS
),
6693 OPC_MUL_PS
= FOP(2, FMT_PS
),
6694 OPC_DIV_PS
= FOP(3, FMT_PS
),
6695 OPC_ABS_PS
= FOP(5, FMT_PS
),
6696 OPC_MOV_PS
= FOP(6, FMT_PS
),
6697 OPC_NEG_PS
= FOP(7, FMT_PS
),
6698 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
6699 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
6700 OPC_MOVN_PS
= FOP(19, FMT_PS
),
6701 OPC_ADDR_PS
= FOP(24, FMT_PS
),
6702 OPC_MULR_PS
= FOP(26, FMT_PS
),
6703 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
6704 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
6705 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
6706 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
6708 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
6709 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
6710 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
6711 OPC_PLL_PS
= FOP(44, FMT_PS
),
6712 OPC_PLU_PS
= FOP(45, FMT_PS
),
6713 OPC_PUL_PS
= FOP(46, FMT_PS
),
6714 OPC_PUU_PS
= FOP(47, FMT_PS
),
6715 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
6716 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
6717 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
6718 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
6719 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
6720 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
6721 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
6722 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
6723 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
6724 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
6725 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
6726 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
6727 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
6728 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
6729 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
6730 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
/*
 * gen_cp1: emit TCG ops for a CP1 (FPU) <-> GPR move instruction,
 * selected by `opc`.  `rt` is the GPR index, `fs` the FPU register
 * (or FPU control register) index.  The visible bodies correspond to
 * the MFC1/MTC1, CFC1/CTC1, 64-bit (TARGET_MIPS64) and high-half
 * move variants -- TODO confirm the exact opcode mapping against the
 * QEMU source; the switch-on-opc scaffolding and case labels
 * (original lines 6737-6740, 6746, 6748-6750, ...) are missing from
 * this extract, so only the surviving case bodies appear below.
 */
6733 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
6735 const char *opn
= "cp1 move";
6736 TCGv t0
= tcg_temp_new();
/* FPR -> GPR (MFC1-style): load single FPR fs, sign-extend to the
   target register width, store into GPR rt. */
6741 TCGv_i32 fp0
= tcg_temp_new_i32();
6743 gen_load_fpr32(fp0
, fs
);
6744 tcg_gen_ext_i32_tl(t0
, fp0
);
6745 tcg_temp_free_i32(fp0
);
6747 gen_store_gpr(t0
, rt
);
/* GPR -> FPR (MTC1-style): load GPR rt, truncate to 32 bits, store
   into single FPR fs. */
6751 gen_load_gpr(t0
, rt
);
6753 TCGv_i32 fp0
= tcg_temp_new_i32();
6755 tcg_gen_trunc_tl_i32(fp0
, t0
);
6756 gen_store_fpr32(fp0
, fs
);
6757 tcg_temp_free_i32(fp0
);
/* CFC1: read FPU control register fs via helper into GPR rt. */
6762 gen_helper_1e0i(cfc1
, t0
, fs
);
6763 gen_store_gpr(t0
, rt
);
/* CTC1: write GPR rt into FPU control register fs via helper. */
6767 gen_load_gpr(t0
, rt
);
6768 gen_helper_0e1i(ctc1
, t0
, fs
);
/* 64-bit targets only: full 64-bit FPR <-> GPR moves
   (presumably DMFC1 / DMTC1 -- confirm against QEMU). */
6771 #if defined(TARGET_MIPS64)
6773 gen_load_fpr64(ctx
, t0
, fs
);
6774 gen_store_gpr(t0
, rt
);
6778 gen_load_gpr(t0
, rt
);
6779 gen_store_fpr64(ctx
, t0
, fs
);
/* High-half read (MFHC1-style): upper 32 bits of FPR pair fs,
   sign-extended into GPR rt. */
6785 TCGv_i32 fp0
= tcg_temp_new_i32();
6787 gen_load_fpr32h(fp0
, fs
);
6788 tcg_gen_ext_i32_tl(t0
, fp0
);
6789 tcg_temp_free_i32(fp0
);
6791 gen_store_gpr(t0
, rt
);
/* High-half write (MTHC1-style): GPR rt truncated into the upper
   32 bits of FPR pair fs. */
6795 gen_load_gpr(t0
, rt
);
6797 TCGv_i32 fp0
= tcg_temp_new_i32();
6799 tcg_gen_trunc_tl_i32(fp0
, t0
);
6800 gen_store_fpr32h(fp0
, fs
);
6801 tcg_temp_free_i32(fp0
);
/* Unhandled opc: raise a Reserved Instruction exception. */
6807 generate_exception (ctx
, EXCP_RI
);
/* Disassembly trace only; opn is otherwise unused in release builds. */
6810 (void)opn
; /* avoid a compiler warning */
6811 MIPS_DEBUG("%s %s %s", opn
, regnames
[rt
], fregnames
[fs
]);
/*
 * gen_movci: GPR conditional move on an FPU condition code
 * (MOVF / MOVT; `tf` selects which truth value of FCC[cc] triggers
 * the move).  Copies GPR rs into GPR rd when the condition holds.
 *
 * NOTE(review): lossy extract -- the declarations of l1/t0/cond, the
 * rd == 0 early-out and the test choosing between the two stores
 * (original lines 6818-6832, 6838, 6840, 6842-) are missing; only
 * the surviving statements appear below.
 */
6817 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
/* Isolate bit FCC[cc] of FCR31 and branch past the move (to l1)
   when it does not match the wanted value (`cond` is derived from
   tf on a line missing from this extract). */
6833 l1
= gen_new_label();
6834 t0
= tcg_temp_new_i32();
6835 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
6836 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
6837 tcg_temp_free_i32(t0
);
/* One of the two stores below executes depending on a test lost
   from this extract -- presumably rs == 0 (store constant 0) versus
   the general register copy.  TODO confirm against QEMU. */
6839 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
6841 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
/*
 * gen_movcf_s: single-precision FP conditional move (MOVF.S/MOVT.S).
 * Copies FPR fs into FPR fd when FCC[cc] matches the truth value
 * selected by tf.
 *
 * NOTE(review): lossy extract -- the derivation of `cond` from tf
 * and the gen_set_label(l1) at the end (original lines 6851-6856,
 * 6861, 6863-) are missing; only the surviving statements remain.
 */
6846 static inline void gen_movcf_s (int fs
, int fd
, int cc
, int tf
)
6849 TCGv_i32 t0
= tcg_temp_new_i32();
6850 int l1
= gen_new_label();
/* Test FCC[cc] in FCR31; skip the copy (branch to l1) when the
   condition does not hold. */
6857 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
6858 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
/* Condition held: copy fs -> fd (t0 is reused as the data temp). */
6859 gen_load_fpr32(t0
, fs
);
6860 gen_store_fpr32(t0
, fd
);
6862 tcg_temp_free_i32(t0
);
/*
 * gen_movcf_d: double-precision FP conditional move (MOVF.D/MOVT.D).
 * Copies 64-bit FPR fs into FPR fd when FCC[cc] matches the truth
 * value selected by tf.
 *
 * NOTE(review): lossy extract -- the derivation of `cond` from tf,
 * the declaration of the 64-bit temp fp0 (original line ~6869) and
 * the gen_set_label(l1) at the end are missing; only the surviving
 * statements remain.
 */
6865 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
6868 TCGv_i32 t0
= tcg_temp_new_i32();
6870 int l1
= gen_new_label();
/* Test FCC[cc] in FCR31; skip the copy (branch to l1) when the
   condition does not hold.  The mask temp is freed before the
   64-bit copy below. */
6877 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
6878 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
6879 tcg_temp_free_i32(t0
);
/* Condition held: 64-bit copy fs -> fd via an i64 temp. */
6880 fp0
= tcg_temp_new_i64();
6881 gen_load_fpr64(ctx
, fp0
, fs
);
6882 gen_store_fpr64(ctx
, fp0
, fd
);
6883 tcg_temp_free_i64(fp0
);
/*
 * gen_movcf_ps: paired-single FP conditional move (MOVF.PS/MOVT.PS).
 * The two 32-bit halves move independently: the low half when
 * FCC[cc] matches tf, the high half when FCC[cc+1] matches tf --
 * hence the two labels l1/l2.
 *
 * NOTE(review): lossy extract -- the derivation of `cond` from tf
 * and the gen_set_label(l1)/gen_set_label(l2) lines are missing;
 * only the surviving statements remain.
 */
6887 static inline void gen_movcf_ps (int fs
, int fd
, int cc
, int tf
)
6890 TCGv_i32 t0
= tcg_temp_new_i32();
6891 int l1
= gen_new_label();
6892 int l2
= gen_new_label();
/* Low half: test FCC[cc]; skip to l1 when the condition fails,
   otherwise copy the low 32 bits fs -> fd. */
6899 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
6900 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
6901 gen_load_fpr32(t0
, fs
);
6902 gen_store_fpr32(t0
, fd
);
/* High half: test FCC[cc+1]; skip to l2 when the condition fails,
   otherwise copy the high 32 bits fs -> fd. */
6905 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
6906 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
6907 gen_load_fpr32h(t0
, fs
);
6908 gen_store_fpr32h(t0
, fd
);
6909 tcg_temp_free_i32(t0
);
6914 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
6915 int ft
, int fs
, int fd
, int cc
)
6917 const char *opn
= "farith";
6918 const char *condnames
[] = {
6936 const char *condnames_abs
[] = {
6954 enum { BINOP
, CMPOP
, OTHEROP
} optype
= OTHEROP
;
6955 uint32_t func
= ctx
->opcode
& 0x3f;
6960 TCGv_i32 fp0
= tcg_temp_new_i32();
6961 TCGv_i32 fp1
= tcg_temp_new_i32();
6963 gen_load_fpr32(fp0
, fs
);
6964 gen_load_fpr32(fp1
, ft
);
6965 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
6966 tcg_temp_free_i32(fp1
);
6967 gen_store_fpr32(fp0
, fd
);
6968 tcg_temp_free_i32(fp0
);
6975 TCGv_i32 fp0
= tcg_temp_new_i32();
6976 TCGv_i32 fp1
= tcg_temp_new_i32();
6978 gen_load_fpr32(fp0
, fs
);
6979 gen_load_fpr32(fp1
, ft
);
6980 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
6981 tcg_temp_free_i32(fp1
);
6982 gen_store_fpr32(fp0
, fd
);
6983 tcg_temp_free_i32(fp0
);
6990 TCGv_i32 fp0
= tcg_temp_new_i32();
6991 TCGv_i32 fp1
= tcg_temp_new_i32();
6993 gen_load_fpr32(fp0
, fs
);
6994 gen_load_fpr32(fp1
, ft
);
6995 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
6996 tcg_temp_free_i32(fp1
);
6997 gen_store_fpr32(fp0
, fd
);
6998 tcg_temp_free_i32(fp0
);
7005 TCGv_i32 fp0
= tcg_temp_new_i32();
7006 TCGv_i32 fp1
= tcg_temp_new_i32();
7008 gen_load_fpr32(fp0
, fs
);
7009 gen_load_fpr32(fp1
, ft
);
7010 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
7011 tcg_temp_free_i32(fp1
);
7012 gen_store_fpr32(fp0
, fd
);
7013 tcg_temp_free_i32(fp0
);
7020 TCGv_i32 fp0
= tcg_temp_new_i32();
7022 gen_load_fpr32(fp0
, fs
);
7023 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
7024 gen_store_fpr32(fp0
, fd
);
7025 tcg_temp_free_i32(fp0
);
7031 TCGv_i32 fp0
= tcg_temp_new_i32();
7033 gen_load_fpr32(fp0
, fs
);
7034 gen_helper_float_abs_s(fp0
, fp0
);
7035 gen_store_fpr32(fp0
, fd
);
7036 tcg_temp_free_i32(fp0
);
7042 TCGv_i32 fp0
= tcg_temp_new_i32();
7044 gen_load_fpr32(fp0
, fs
);
7045 gen_store_fpr32(fp0
, fd
);
7046 tcg_temp_free_i32(fp0
);
7052 TCGv_i32 fp0
= tcg_temp_new_i32();
7054 gen_load_fpr32(fp0
, fs
);
7055 gen_helper_float_chs_s(fp0
, fp0
);
7056 gen_store_fpr32(fp0
, fd
);
7057 tcg_temp_free_i32(fp0
);
7062 check_cp1_64bitmode(ctx
);
7064 TCGv_i32 fp32
= tcg_temp_new_i32();
7065 TCGv_i64 fp64
= tcg_temp_new_i64();
7067 gen_load_fpr32(fp32
, fs
);
7068 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
7069 tcg_temp_free_i32(fp32
);
7070 gen_store_fpr64(ctx
, fp64
, fd
);
7071 tcg_temp_free_i64(fp64
);
7076 check_cp1_64bitmode(ctx
);
7078 TCGv_i32 fp32
= tcg_temp_new_i32();
7079 TCGv_i64 fp64
= tcg_temp_new_i64();
7081 gen_load_fpr32(fp32
, fs
);
7082 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
7083 tcg_temp_free_i32(fp32
);
7084 gen_store_fpr64(ctx
, fp64
, fd
);
7085 tcg_temp_free_i64(fp64
);
7090 check_cp1_64bitmode(ctx
);
7092 TCGv_i32 fp32
= tcg_temp_new_i32();
7093 TCGv_i64 fp64
= tcg_temp_new_i64();
7095 gen_load_fpr32(fp32
, fs
);
7096 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
7097 tcg_temp_free_i32(fp32
);
7098 gen_store_fpr64(ctx
, fp64
, fd
);
7099 tcg_temp_free_i64(fp64
);
7104 check_cp1_64bitmode(ctx
);
7106 TCGv_i32 fp32
= tcg_temp_new_i32();
7107 TCGv_i64 fp64
= tcg_temp_new_i64();
7109 gen_load_fpr32(fp32
, fs
);
7110 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
7111 tcg_temp_free_i32(fp32
);
7112 gen_store_fpr64(ctx
, fp64
, fd
);
7113 tcg_temp_free_i64(fp64
);
7119 TCGv_i32 fp0
= tcg_temp_new_i32();
7121 gen_load_fpr32(fp0
, fs
);
7122 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
7123 gen_store_fpr32(fp0
, fd
);
7124 tcg_temp_free_i32(fp0
);
7130 TCGv_i32 fp0
= tcg_temp_new_i32();
7132 gen_load_fpr32(fp0
, fs
);
7133 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
7134 gen_store_fpr32(fp0
, fd
);
7135 tcg_temp_free_i32(fp0
);
7141 TCGv_i32 fp0
= tcg_temp_new_i32();
7143 gen_load_fpr32(fp0
, fs
);
7144 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
7145 gen_store_fpr32(fp0
, fd
);
7146 tcg_temp_free_i32(fp0
);
7152 TCGv_i32 fp0
= tcg_temp_new_i32();
7154 gen_load_fpr32(fp0
, fs
);
7155 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
7156 gen_store_fpr32(fp0
, fd
);
7157 tcg_temp_free_i32(fp0
);
7162 gen_movcf_s(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7167 int l1
= gen_new_label();
7171 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7173 fp0
= tcg_temp_new_i32();
7174 gen_load_fpr32(fp0
, fs
);
7175 gen_store_fpr32(fp0
, fd
);
7176 tcg_temp_free_i32(fp0
);
7183 int l1
= gen_new_label();
7187 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7188 fp0
= tcg_temp_new_i32();
7189 gen_load_fpr32(fp0
, fs
);
7190 gen_store_fpr32(fp0
, fd
);
7191 tcg_temp_free_i32(fp0
);
7200 TCGv_i32 fp0
= tcg_temp_new_i32();
7202 gen_load_fpr32(fp0
, fs
);
7203 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
7204 gen_store_fpr32(fp0
, fd
);
7205 tcg_temp_free_i32(fp0
);
7212 TCGv_i32 fp0
= tcg_temp_new_i32();
7214 gen_load_fpr32(fp0
, fs
);
7215 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
7216 gen_store_fpr32(fp0
, fd
);
7217 tcg_temp_free_i32(fp0
);
7222 check_cp1_64bitmode(ctx
);
7224 TCGv_i32 fp0
= tcg_temp_new_i32();
7225 TCGv_i32 fp1
= tcg_temp_new_i32();
7227 gen_load_fpr32(fp0
, fs
);
7228 gen_load_fpr32(fp1
, ft
);
7229 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
7230 tcg_temp_free_i32(fp1
);
7231 gen_store_fpr32(fp0
, fd
);
7232 tcg_temp_free_i32(fp0
);
7237 check_cp1_64bitmode(ctx
);
7239 TCGv_i32 fp0
= tcg_temp_new_i32();
7241 gen_load_fpr32(fp0
, fs
);
7242 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
7243 gen_store_fpr32(fp0
, fd
);
7244 tcg_temp_free_i32(fp0
);
7249 check_cp1_64bitmode(ctx
);
7251 TCGv_i32 fp0
= tcg_temp_new_i32();
7253 gen_load_fpr32(fp0
, fs
);
7254 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
7255 gen_store_fpr32(fp0
, fd
);
7256 tcg_temp_free_i32(fp0
);
7261 check_cp1_64bitmode(ctx
);
7263 TCGv_i32 fp0
= tcg_temp_new_i32();
7264 TCGv_i32 fp1
= tcg_temp_new_i32();
7266 gen_load_fpr32(fp0
, fs
);
7267 gen_load_fpr32(fp1
, ft
);
7268 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
7269 tcg_temp_free_i32(fp1
);
7270 gen_store_fpr32(fp0
, fd
);
7271 tcg_temp_free_i32(fp0
);
7276 check_cp1_registers(ctx
, fd
);
7278 TCGv_i32 fp32
= tcg_temp_new_i32();
7279 TCGv_i64 fp64
= tcg_temp_new_i64();
7281 gen_load_fpr32(fp32
, fs
);
7282 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
7283 tcg_temp_free_i32(fp32
);
7284 gen_store_fpr64(ctx
, fp64
, fd
);
7285 tcg_temp_free_i64(fp64
);
7291 TCGv_i32 fp0
= tcg_temp_new_i32();
7293 gen_load_fpr32(fp0
, fs
);
7294 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
7295 gen_store_fpr32(fp0
, fd
);
7296 tcg_temp_free_i32(fp0
);
7301 check_cp1_64bitmode(ctx
);
7303 TCGv_i32 fp32
= tcg_temp_new_i32();
7304 TCGv_i64 fp64
= tcg_temp_new_i64();
7306 gen_load_fpr32(fp32
, fs
);
7307 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
7308 tcg_temp_free_i32(fp32
);
7309 gen_store_fpr64(ctx
, fp64
, fd
);
7310 tcg_temp_free_i64(fp64
);
7315 check_cp1_64bitmode(ctx
);
7317 TCGv_i64 fp64
= tcg_temp_new_i64();
7318 TCGv_i32 fp32_0
= tcg_temp_new_i32();
7319 TCGv_i32 fp32_1
= tcg_temp_new_i32();
7321 gen_load_fpr32(fp32_0
, fs
);
7322 gen_load_fpr32(fp32_1
, ft
);
7323 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
7324 tcg_temp_free_i32(fp32_1
);
7325 tcg_temp_free_i32(fp32_0
);
7326 gen_store_fpr64(ctx
, fp64
, fd
);
7327 tcg_temp_free_i64(fp64
);
7340 case OPC_CMP_NGLE_S
:
7347 if (ctx
->opcode
& (1 << 6)) {
7348 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
7349 opn
= condnames_abs
[func
-48];
7351 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
7352 opn
= condnames
[func
-48];
7356 check_cp1_registers(ctx
, fs
| ft
| fd
);
7358 TCGv_i64 fp0
= tcg_temp_new_i64();
7359 TCGv_i64 fp1
= tcg_temp_new_i64();
7361 gen_load_fpr64(ctx
, fp0
, fs
);
7362 gen_load_fpr64(ctx
, fp1
, ft
);
7363 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
7364 tcg_temp_free_i64(fp1
);
7365 gen_store_fpr64(ctx
, fp0
, fd
);
7366 tcg_temp_free_i64(fp0
);
7372 check_cp1_registers(ctx
, fs
| ft
| fd
);
7374 TCGv_i64 fp0
= tcg_temp_new_i64();
7375 TCGv_i64 fp1
= tcg_temp_new_i64();
7377 gen_load_fpr64(ctx
, fp0
, fs
);
7378 gen_load_fpr64(ctx
, fp1
, ft
);
7379 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
7380 tcg_temp_free_i64(fp1
);
7381 gen_store_fpr64(ctx
, fp0
, fd
);
7382 tcg_temp_free_i64(fp0
);
7388 check_cp1_registers(ctx
, fs
| ft
| fd
);
7390 TCGv_i64 fp0
= tcg_temp_new_i64();
7391 TCGv_i64 fp1
= tcg_temp_new_i64();
7393 gen_load_fpr64(ctx
, fp0
, fs
);
7394 gen_load_fpr64(ctx
, fp1
, ft
);
7395 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
7396 tcg_temp_free_i64(fp1
);
7397 gen_store_fpr64(ctx
, fp0
, fd
);
7398 tcg_temp_free_i64(fp0
);
7404 check_cp1_registers(ctx
, fs
| ft
| fd
);
7406 TCGv_i64 fp0
= tcg_temp_new_i64();
7407 TCGv_i64 fp1
= tcg_temp_new_i64();
7409 gen_load_fpr64(ctx
, fp0
, fs
);
7410 gen_load_fpr64(ctx
, fp1
, ft
);
7411 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
7412 tcg_temp_free_i64(fp1
);
7413 gen_store_fpr64(ctx
, fp0
, fd
);
7414 tcg_temp_free_i64(fp0
);
7420 check_cp1_registers(ctx
, fs
| fd
);
7422 TCGv_i64 fp0
= tcg_temp_new_i64();
7424 gen_load_fpr64(ctx
, fp0
, fs
);
7425 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
7426 gen_store_fpr64(ctx
, fp0
, fd
);
7427 tcg_temp_free_i64(fp0
);
7432 check_cp1_registers(ctx
, fs
| fd
);
7434 TCGv_i64 fp0
= tcg_temp_new_i64();
7436 gen_load_fpr64(ctx
, fp0
, fs
);
7437 gen_helper_float_abs_d(fp0
, fp0
);
7438 gen_store_fpr64(ctx
, fp0
, fd
);
7439 tcg_temp_free_i64(fp0
);
7444 check_cp1_registers(ctx
, fs
| fd
);
7446 TCGv_i64 fp0
= tcg_temp_new_i64();
7448 gen_load_fpr64(ctx
, fp0
, fs
);
7449 gen_store_fpr64(ctx
, fp0
, fd
);
7450 tcg_temp_free_i64(fp0
);
7455 check_cp1_registers(ctx
, fs
| fd
);
7457 TCGv_i64 fp0
= tcg_temp_new_i64();
7459 gen_load_fpr64(ctx
, fp0
, fs
);
7460 gen_helper_float_chs_d(fp0
, fp0
);
7461 gen_store_fpr64(ctx
, fp0
, fd
);
7462 tcg_temp_free_i64(fp0
);
7467 check_cp1_64bitmode(ctx
);
7469 TCGv_i64 fp0
= tcg_temp_new_i64();
7471 gen_load_fpr64(ctx
, fp0
, fs
);
7472 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
7473 gen_store_fpr64(ctx
, fp0
, fd
);
7474 tcg_temp_free_i64(fp0
);
7479 check_cp1_64bitmode(ctx
);
7481 TCGv_i64 fp0
= tcg_temp_new_i64();
7483 gen_load_fpr64(ctx
, fp0
, fs
);
7484 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
7485 gen_store_fpr64(ctx
, fp0
, fd
);
7486 tcg_temp_free_i64(fp0
);
7491 check_cp1_64bitmode(ctx
);
7493 TCGv_i64 fp0
= tcg_temp_new_i64();
7495 gen_load_fpr64(ctx
, fp0
, fs
);
7496 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
7497 gen_store_fpr64(ctx
, fp0
, fd
);
7498 tcg_temp_free_i64(fp0
);
7503 check_cp1_64bitmode(ctx
);
7505 TCGv_i64 fp0
= tcg_temp_new_i64();
7507 gen_load_fpr64(ctx
, fp0
, fs
);
7508 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
7509 gen_store_fpr64(ctx
, fp0
, fd
);
7510 tcg_temp_free_i64(fp0
);
7515 check_cp1_registers(ctx
, fs
);
7517 TCGv_i32 fp32
= tcg_temp_new_i32();
7518 TCGv_i64 fp64
= tcg_temp_new_i64();
7520 gen_load_fpr64(ctx
, fp64
, fs
);
7521 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
7522 tcg_temp_free_i64(fp64
);
7523 gen_store_fpr32(fp32
, fd
);
7524 tcg_temp_free_i32(fp32
);
7529 check_cp1_registers(ctx
, fs
);
7531 TCGv_i32 fp32
= tcg_temp_new_i32();
7532 TCGv_i64 fp64
= tcg_temp_new_i64();
7534 gen_load_fpr64(ctx
, fp64
, fs
);
7535 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
7536 tcg_temp_free_i64(fp64
);
7537 gen_store_fpr32(fp32
, fd
);
7538 tcg_temp_free_i32(fp32
);
7543 check_cp1_registers(ctx
, fs
);
7545 TCGv_i32 fp32
= tcg_temp_new_i32();
7546 TCGv_i64 fp64
= tcg_temp_new_i64();
7548 gen_load_fpr64(ctx
, fp64
, fs
);
7549 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
7550 tcg_temp_free_i64(fp64
);
7551 gen_store_fpr32(fp32
, fd
);
7552 tcg_temp_free_i32(fp32
);
7557 check_cp1_registers(ctx
, fs
);
7559 TCGv_i32 fp32
= tcg_temp_new_i32();
7560 TCGv_i64 fp64
= tcg_temp_new_i64();
7562 gen_load_fpr64(ctx
, fp64
, fs
);
7563 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
7564 tcg_temp_free_i64(fp64
);
7565 gen_store_fpr32(fp32
, fd
);
7566 tcg_temp_free_i32(fp32
);
7571 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7576 int l1
= gen_new_label();
7580 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7582 fp0
= tcg_temp_new_i64();
7583 gen_load_fpr64(ctx
, fp0
, fs
);
7584 gen_store_fpr64(ctx
, fp0
, fd
);
7585 tcg_temp_free_i64(fp0
);
7592 int l1
= gen_new_label();
7596 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7597 fp0
= tcg_temp_new_i64();
7598 gen_load_fpr64(ctx
, fp0
, fs
);
7599 gen_store_fpr64(ctx
, fp0
, fd
);
7600 tcg_temp_free_i64(fp0
);
7607 check_cp1_64bitmode(ctx
);
7609 TCGv_i64 fp0
= tcg_temp_new_i64();
7611 gen_load_fpr64(ctx
, fp0
, fs
);
7612 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
7613 gen_store_fpr64(ctx
, fp0
, fd
);
7614 tcg_temp_free_i64(fp0
);
7619 check_cp1_64bitmode(ctx
);
7621 TCGv_i64 fp0
= tcg_temp_new_i64();
7623 gen_load_fpr64(ctx
, fp0
, fs
);
7624 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
7625 gen_store_fpr64(ctx
, fp0
, fd
);
7626 tcg_temp_free_i64(fp0
);
7631 check_cp1_64bitmode(ctx
);
7633 TCGv_i64 fp0
= tcg_temp_new_i64();
7634 TCGv_i64 fp1
= tcg_temp_new_i64();
7636 gen_load_fpr64(ctx
, fp0
, fs
);
7637 gen_load_fpr64(ctx
, fp1
, ft
);
7638 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
7639 tcg_temp_free_i64(fp1
);
7640 gen_store_fpr64(ctx
, fp0
, fd
);
7641 tcg_temp_free_i64(fp0
);
7646 check_cp1_64bitmode(ctx
);
7648 TCGv_i64 fp0
= tcg_temp_new_i64();
7650 gen_load_fpr64(ctx
, fp0
, fs
);
7651 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
7652 gen_store_fpr64(ctx
, fp0
, fd
);
7653 tcg_temp_free_i64(fp0
);
7658 check_cp1_64bitmode(ctx
);
7660 TCGv_i64 fp0
= tcg_temp_new_i64();
7662 gen_load_fpr64(ctx
, fp0
, fs
);
7663 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
7664 gen_store_fpr64(ctx
, fp0
, fd
);
7665 tcg_temp_free_i64(fp0
);
7670 check_cp1_64bitmode(ctx
);
7672 TCGv_i64 fp0
= tcg_temp_new_i64();
7673 TCGv_i64 fp1
= tcg_temp_new_i64();
7675 gen_load_fpr64(ctx
, fp0
, fs
);
7676 gen_load_fpr64(ctx
, fp1
, ft
);
7677 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
7678 tcg_temp_free_i64(fp1
);
7679 gen_store_fpr64(ctx
, fp0
, fd
);
7680 tcg_temp_free_i64(fp0
);
7693 case OPC_CMP_NGLE_D
:
7700 if (ctx
->opcode
& (1 << 6)) {
7701 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
7702 opn
= condnames_abs
[func
-48];
7704 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
7705 opn
= condnames
[func
-48];
7709 check_cp1_registers(ctx
, fs
);
7711 TCGv_i32 fp32
= tcg_temp_new_i32();
7712 TCGv_i64 fp64
= tcg_temp_new_i64();
7714 gen_load_fpr64(ctx
, fp64
, fs
);
7715 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
7716 tcg_temp_free_i64(fp64
);
7717 gen_store_fpr32(fp32
, fd
);
7718 tcg_temp_free_i32(fp32
);
7723 check_cp1_registers(ctx
, fs
);
7725 TCGv_i32 fp32
= tcg_temp_new_i32();
7726 TCGv_i64 fp64
= tcg_temp_new_i64();
7728 gen_load_fpr64(ctx
, fp64
, fs
);
7729 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
7730 tcg_temp_free_i64(fp64
);
7731 gen_store_fpr32(fp32
, fd
);
7732 tcg_temp_free_i32(fp32
);
7737 check_cp1_64bitmode(ctx
);
7739 TCGv_i64 fp0
= tcg_temp_new_i64();
7741 gen_load_fpr64(ctx
, fp0
, fs
);
7742 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
7743 gen_store_fpr64(ctx
, fp0
, fd
);
7744 tcg_temp_free_i64(fp0
);
7750 TCGv_i32 fp0
= tcg_temp_new_i32();
7752 gen_load_fpr32(fp0
, fs
);
7753 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
7754 gen_store_fpr32(fp0
, fd
);
7755 tcg_temp_free_i32(fp0
);
7760 check_cp1_registers(ctx
, fd
);
7762 TCGv_i32 fp32
= tcg_temp_new_i32();
7763 TCGv_i64 fp64
= tcg_temp_new_i64();
7765 gen_load_fpr32(fp32
, fs
);
7766 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
7767 tcg_temp_free_i32(fp32
);
7768 gen_store_fpr64(ctx
, fp64
, fd
);
7769 tcg_temp_free_i64(fp64
);
7774 check_cp1_64bitmode(ctx
);
7776 TCGv_i32 fp32
= tcg_temp_new_i32();
7777 TCGv_i64 fp64
= tcg_temp_new_i64();
7779 gen_load_fpr64(ctx
, fp64
, fs
);
7780 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
7781 tcg_temp_free_i64(fp64
);
7782 gen_store_fpr32(fp32
, fd
);
7783 tcg_temp_free_i32(fp32
);
7788 check_cp1_64bitmode(ctx
);
7790 TCGv_i64 fp0
= tcg_temp_new_i64();
7792 gen_load_fpr64(ctx
, fp0
, fs
);
7793 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
7794 gen_store_fpr64(ctx
, fp0
, fd
);
7795 tcg_temp_free_i64(fp0
);
7800 check_cp1_64bitmode(ctx
);
7802 TCGv_i64 fp0
= tcg_temp_new_i64();
7804 gen_load_fpr64(ctx
, fp0
, fs
);
7805 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
7806 gen_store_fpr64(ctx
, fp0
, fd
);
7807 tcg_temp_free_i64(fp0
);
7812 check_cp1_64bitmode(ctx
);
7814 TCGv_i64 fp0
= tcg_temp_new_i64();
7815 TCGv_i64 fp1
= tcg_temp_new_i64();
7817 gen_load_fpr64(ctx
, fp0
, fs
);
7818 gen_load_fpr64(ctx
, fp1
, ft
);
7819 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
7820 tcg_temp_free_i64(fp1
);
7821 gen_store_fpr64(ctx
, fp0
, fd
);
7822 tcg_temp_free_i64(fp0
);
7827 check_cp1_64bitmode(ctx
);
7829 TCGv_i64 fp0
= tcg_temp_new_i64();
7830 TCGv_i64 fp1
= tcg_temp_new_i64();
7832 gen_load_fpr64(ctx
, fp0
, fs
);
7833 gen_load_fpr64(ctx
, fp1
, ft
);
7834 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
7835 tcg_temp_free_i64(fp1
);
7836 gen_store_fpr64(ctx
, fp0
, fd
);
7837 tcg_temp_free_i64(fp0
);
7842 check_cp1_64bitmode(ctx
);
7844 TCGv_i64 fp0
= tcg_temp_new_i64();
7845 TCGv_i64 fp1
= tcg_temp_new_i64();
7847 gen_load_fpr64(ctx
, fp0
, fs
);
7848 gen_load_fpr64(ctx
, fp1
, ft
);
7849 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
7850 tcg_temp_free_i64(fp1
);
7851 gen_store_fpr64(ctx
, fp0
, fd
);
7852 tcg_temp_free_i64(fp0
);
7857 check_cp1_64bitmode(ctx
);
7859 TCGv_i64 fp0
= tcg_temp_new_i64();
7861 gen_load_fpr64(ctx
, fp0
, fs
);
7862 gen_helper_float_abs_ps(fp0
, fp0
);
7863 gen_store_fpr64(ctx
, fp0
, fd
);
7864 tcg_temp_free_i64(fp0
);
7869 check_cp1_64bitmode(ctx
);
7871 TCGv_i64 fp0
= tcg_temp_new_i64();
7873 gen_load_fpr64(ctx
, fp0
, fs
);
7874 gen_store_fpr64(ctx
, fp0
, fd
);
7875 tcg_temp_free_i64(fp0
);
7880 check_cp1_64bitmode(ctx
);
7882 TCGv_i64 fp0
= tcg_temp_new_i64();
7884 gen_load_fpr64(ctx
, fp0
, fs
);
7885 gen_helper_float_chs_ps(fp0
, fp0
);
7886 gen_store_fpr64(ctx
, fp0
, fd
);
7887 tcg_temp_free_i64(fp0
);
7892 check_cp1_64bitmode(ctx
);
7893 gen_movcf_ps(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7897 check_cp1_64bitmode(ctx
);
7899 int l1
= gen_new_label();
7903 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7904 fp0
= tcg_temp_new_i64();
7905 gen_load_fpr64(ctx
, fp0
, fs
);
7906 gen_store_fpr64(ctx
, fp0
, fd
);
7907 tcg_temp_free_i64(fp0
);
7913 check_cp1_64bitmode(ctx
);
7915 int l1
= gen_new_label();
7919 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7920 fp0
= tcg_temp_new_i64();
7921 gen_load_fpr64(ctx
, fp0
, fs
);
7922 gen_store_fpr64(ctx
, fp0
, fd
);
7923 tcg_temp_free_i64(fp0
);
7930 check_cp1_64bitmode(ctx
);
7932 TCGv_i64 fp0
= tcg_temp_new_i64();
7933 TCGv_i64 fp1
= tcg_temp_new_i64();
7935 gen_load_fpr64(ctx
, fp0
, ft
);
7936 gen_load_fpr64(ctx
, fp1
, fs
);
7937 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
7938 tcg_temp_free_i64(fp1
);
7939 gen_store_fpr64(ctx
, fp0
, fd
);
7940 tcg_temp_free_i64(fp0
);
7945 check_cp1_64bitmode(ctx
);
7947 TCGv_i64 fp0
= tcg_temp_new_i64();
7948 TCGv_i64 fp1
= tcg_temp_new_i64();
7950 gen_load_fpr64(ctx
, fp0
, ft
);
7951 gen_load_fpr64(ctx
, fp1
, fs
);
7952 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
7953 tcg_temp_free_i64(fp1
);
7954 gen_store_fpr64(ctx
, fp0
, fd
);
7955 tcg_temp_free_i64(fp0
);
7960 check_cp1_64bitmode(ctx
);
7962 TCGv_i64 fp0
= tcg_temp_new_i64();
7963 TCGv_i64 fp1
= tcg_temp_new_i64();
7965 gen_load_fpr64(ctx
, fp0
, fs
);
7966 gen_load_fpr64(ctx
, fp1
, ft
);
7967 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
7968 tcg_temp_free_i64(fp1
);
7969 gen_store_fpr64(ctx
, fp0
, fd
);
7970 tcg_temp_free_i64(fp0
);
7975 check_cp1_64bitmode(ctx
);
7977 TCGv_i64 fp0
= tcg_temp_new_i64();
7979 gen_load_fpr64(ctx
, fp0
, fs
);
7980 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
7981 gen_store_fpr64(ctx
, fp0
, fd
);
7982 tcg_temp_free_i64(fp0
);
7987 check_cp1_64bitmode(ctx
);
7989 TCGv_i64 fp0
= tcg_temp_new_i64();
7991 gen_load_fpr64(ctx
, fp0
, fs
);
7992 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
7993 gen_store_fpr64(ctx
, fp0
, fd
);
7994 tcg_temp_free_i64(fp0
);
7999 check_cp1_64bitmode(ctx
);
8001 TCGv_i64 fp0
= tcg_temp_new_i64();
8002 TCGv_i64 fp1
= tcg_temp_new_i64();
8004 gen_load_fpr64(ctx
, fp0
, fs
);
8005 gen_load_fpr64(ctx
, fp1
, ft
);
8006 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
8007 tcg_temp_free_i64(fp1
);
8008 gen_store_fpr64(ctx
, fp0
, fd
);
8009 tcg_temp_free_i64(fp0
);
8014 check_cp1_64bitmode(ctx
);
8016 TCGv_i32 fp0
= tcg_temp_new_i32();
8018 gen_load_fpr32h(fp0
, fs
);
8019 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
8020 gen_store_fpr32(fp0
, fd
);
8021 tcg_temp_free_i32(fp0
);
8026 check_cp1_64bitmode(ctx
);
8028 TCGv_i64 fp0
= tcg_temp_new_i64();
8030 gen_load_fpr64(ctx
, fp0
, fs
);
8031 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
8032 gen_store_fpr64(ctx
, fp0
, fd
);
8033 tcg_temp_free_i64(fp0
);
8038 check_cp1_64bitmode(ctx
);
8040 TCGv_i32 fp0
= tcg_temp_new_i32();
8042 gen_load_fpr32(fp0
, fs
);
8043 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
8044 gen_store_fpr32(fp0
, fd
);
8045 tcg_temp_free_i32(fp0
);
8050 check_cp1_64bitmode(ctx
);
8052 TCGv_i32 fp0
= tcg_temp_new_i32();
8053 TCGv_i32 fp1
= tcg_temp_new_i32();
8055 gen_load_fpr32(fp0
, fs
);
8056 gen_load_fpr32(fp1
, ft
);
8057 gen_store_fpr32h(fp0
, fd
);
8058 gen_store_fpr32(fp1
, fd
);
8059 tcg_temp_free_i32(fp0
);
8060 tcg_temp_free_i32(fp1
);
8065 check_cp1_64bitmode(ctx
);
8067 TCGv_i32 fp0
= tcg_temp_new_i32();
8068 TCGv_i32 fp1
= tcg_temp_new_i32();
8070 gen_load_fpr32(fp0
, fs
);
8071 gen_load_fpr32h(fp1
, ft
);
8072 gen_store_fpr32(fp1
, fd
);
8073 gen_store_fpr32h(fp0
, fd
);
8074 tcg_temp_free_i32(fp0
);
8075 tcg_temp_free_i32(fp1
);
8080 check_cp1_64bitmode(ctx
);
8082 TCGv_i32 fp0
= tcg_temp_new_i32();
8083 TCGv_i32 fp1
= tcg_temp_new_i32();
8085 gen_load_fpr32h(fp0
, fs
);
8086 gen_load_fpr32(fp1
, ft
);
8087 gen_store_fpr32(fp1
, fd
);
8088 gen_store_fpr32h(fp0
, fd
);
8089 tcg_temp_free_i32(fp0
);
8090 tcg_temp_free_i32(fp1
);
8095 check_cp1_64bitmode(ctx
);
8097 TCGv_i32 fp0
= tcg_temp_new_i32();
8098 TCGv_i32 fp1
= tcg_temp_new_i32();
8100 gen_load_fpr32h(fp0
, fs
);
8101 gen_load_fpr32h(fp1
, ft
);
8102 gen_store_fpr32(fp1
, fd
);
8103 gen_store_fpr32h(fp0
, fd
);
8104 tcg_temp_free_i32(fp0
);
8105 tcg_temp_free_i32(fp1
);
8112 case OPC_CMP_UEQ_PS
:
8113 case OPC_CMP_OLT_PS
:
8114 case OPC_CMP_ULT_PS
:
8115 case OPC_CMP_OLE_PS
:
8116 case OPC_CMP_ULE_PS
:
8118 case OPC_CMP_NGLE_PS
:
8119 case OPC_CMP_SEQ_PS
:
8120 case OPC_CMP_NGL_PS
:
8122 case OPC_CMP_NGE_PS
:
8124 case OPC_CMP_NGT_PS
:
8125 if (ctx
->opcode
& (1 << 6)) {
8126 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
8127 opn
= condnames_abs
[func
-48];
8129 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
8130 opn
= condnames
[func
-48];
8135 generate_exception (ctx
, EXCP_RI
);
8138 (void)opn
; /* avoid a compiler warning */
8141 MIPS_DEBUG("%s %s, %s, %s", opn
, fregnames
[fd
], fregnames
[fs
], fregnames
[ft
]);
8144 MIPS_DEBUG("%s %s,%s", opn
, fregnames
[fs
], fregnames
[ft
]);
8147 MIPS_DEBUG("%s %s,%s", opn
, fregnames
[fd
], fregnames
[fs
]);
8152 /* Coprocessor 3 (FPU) */
/*
 * gen_flt3_ldst: COP1X indexed FPU load/store -- presumably the
 * LWXC1/LDXC1/LUXC1 loads and SWXC1/SDXC1/SUXC1 stores (the case
 * labels are missing from this extract; confirm against QEMU).
 * Computes the effective address GPR[base] + GPR[index] into t0
 * (with zero-register shortcuts), then performs the per-opcode
 * memory access.
 *
 * NOTE(review): lossy extract -- the switch (opc), its case labels,
 * the `store` flag assignments, the leading `if (base == 0)` test
 * and all closing braces are absent; the comment at original line
 * 8167 is also cut short (its closing line 8168 is missing).  Only
 * the surviving case bodies appear below.
 */
8153 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
8154 int fd
, int fs
, int base
, int index
)
8156 const char *opn
= "extended float load/store";
8158 TCGv t0
= tcg_temp_new();
/* Effective-address selection: t0 = index, base, or base + index
   depending on which operand registers are $zero. */
8161 gen_load_gpr(t0
, index
);
8162 } else if (index
== 0) {
8163 gen_load_gpr(t0
, base
);
8165 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
8167 /* Don't do NOP if destination is zero: we must perform the actual
8169 save_cpu_state(ctx
, 0);
/* 32-bit FP load: sign-extending 32-bit load into t0, truncate to
   i32, store into FPR fd. */
8174 TCGv_i32 fp0
= tcg_temp_new_i32();
8176 tcg_gen_qemu_ld32s(t0
, t0
, ctx
->mem_idx
);
8177 tcg_gen_trunc_tl_i32(fp0
, t0
);
8178 gen_store_fpr32(fp0
, fd
);
8179 tcg_temp_free_i32(fp0
);
/* 64-bit FP load into FPR fd (register-pair availability checked
   first). */
8185 check_cp1_registers(ctx
, fd
);
8187 TCGv_i64 fp0
= tcg_temp_new_i64();
8189 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
8190 gen_store_fpr64(ctx
, fp0
, fd
);
8191 tcg_temp_free_i64(fp0
);
/* Unaligned-capable 64-bit load: the address is forced down to an
   8-byte boundary (~0x7 mask) before the access. */
8196 check_cp1_64bitmode(ctx
);
8197 tcg_gen_andi_tl(t0
, t0
, ~0x7);
8199 TCGv_i64 fp0
= tcg_temp_new_i64();
8201 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
8202 gen_store_fpr64(ctx
, fp0
, fd
);
8203 tcg_temp_free_i64(fp0
);
/* 32-bit FP store: FPR fs zero-extended into a target-width temp,
   then stored to memory at t0. */
8210 TCGv_i32 fp0
= tcg_temp_new_i32();
8211 TCGv t1
= tcg_temp_new();
8213 gen_load_fpr32(fp0
, fs
);
8214 tcg_gen_extu_i32_tl(t1
, fp0
);
8215 tcg_gen_qemu_st32(t1
, t0
, ctx
->mem_idx
);
8216 tcg_temp_free_i32(fp0
);
/* 64-bit FP store from FPR fs. */
8224 check_cp1_registers(ctx
, fs
);
8226 TCGv_i64 fp0
= tcg_temp_new_i64();
8228 gen_load_fpr64(ctx
, fp0
, fs
);
8229 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
8230 tcg_temp_free_i64(fp0
);
/* Unaligned-capable 64-bit store: address masked to an 8-byte
   boundary first. */
8236 check_cp1_64bitmode(ctx
);
8237 tcg_gen_andi_tl(t0
, t0
, ~0x7);
8239 TCGv_i64 fp0
= tcg_temp_new_i64();
8241 gen_load_fpr64(ctx
, fp0
, fs
);
8242 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
8243 tcg_temp_free_i64(fp0
);
/* Disassembly trace: for stores print fs, for loads fd, plus the
   index and base GPR names. */
8250 (void)opn
; (void)store
; /* avoid compiler warnings */
8251 MIPS_DEBUG("%s %s, %s(%s)", opn
, fregnames
[store
? fs
: fd
],
8252 regnames
[index
], regnames
[base
]);
8255 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
8256 int fd
, int fr
, int fs
, int ft
)
8258 const char *opn
= "flt3_arith";
8262 check_cp1_64bitmode(ctx
);
8264 TCGv t0
= tcg_temp_local_new();
8265 TCGv_i32 fp
= tcg_temp_new_i32();
8266 TCGv_i32 fph
= tcg_temp_new_i32();
8267 int l1
= gen_new_label();
8268 int l2
= gen_new_label();
8270 gen_load_gpr(t0
, fr
);
8271 tcg_gen_andi_tl(t0
, t0
, 0x7);
8273 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
8274 gen_load_fpr32(fp
, fs
);
8275 gen_load_fpr32h(fph
, fs
);
8276 gen_store_fpr32(fp
, fd
);
8277 gen_store_fpr32h(fph
, fd
);
8280 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
8282 #ifdef TARGET_WORDS_BIGENDIAN
8283 gen_load_fpr32(fp
, fs
);
8284 gen_load_fpr32h(fph
, ft
);
8285 gen_store_fpr32h(fp
, fd
);
8286 gen_store_fpr32(fph
, fd
);
8288 gen_load_fpr32h(fph
, fs
);
8289 gen_load_fpr32(fp
, ft
);
8290 gen_store_fpr32(fph
, fd
);
8291 gen_store_fpr32h(fp
, fd
);
8294 tcg_temp_free_i32(fp
);
8295 tcg_temp_free_i32(fph
);
8302 TCGv_i32 fp0
= tcg_temp_new_i32();
8303 TCGv_i32 fp1
= tcg_temp_new_i32();
8304 TCGv_i32 fp2
= tcg_temp_new_i32();
8306 gen_load_fpr32(fp0
, fs
);
8307 gen_load_fpr32(fp1
, ft
);
8308 gen_load_fpr32(fp2
, fr
);
8309 gen_helper_float_muladd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8310 tcg_temp_free_i32(fp0
);
8311 tcg_temp_free_i32(fp1
);
8312 gen_store_fpr32(fp2
, fd
);
8313 tcg_temp_free_i32(fp2
);
8319 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8321 TCGv_i64 fp0
= tcg_temp_new_i64();
8322 TCGv_i64 fp1
= tcg_temp_new_i64();
8323 TCGv_i64 fp2
= tcg_temp_new_i64();
8325 gen_load_fpr64(ctx
, fp0
, fs
);
8326 gen_load_fpr64(ctx
, fp1
, ft
);
8327 gen_load_fpr64(ctx
, fp2
, fr
);
8328 gen_helper_float_muladd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8329 tcg_temp_free_i64(fp0
);
8330 tcg_temp_free_i64(fp1
);
8331 gen_store_fpr64(ctx
, fp2
, fd
);
8332 tcg_temp_free_i64(fp2
);
8337 check_cp1_64bitmode(ctx
);
8339 TCGv_i64 fp0
= tcg_temp_new_i64();
8340 TCGv_i64 fp1
= tcg_temp_new_i64();
8341 TCGv_i64 fp2
= tcg_temp_new_i64();
8343 gen_load_fpr64(ctx
, fp0
, fs
);
8344 gen_load_fpr64(ctx
, fp1
, ft
);
8345 gen_load_fpr64(ctx
, fp2
, fr
);
8346 gen_helper_float_muladd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8347 tcg_temp_free_i64(fp0
);
8348 tcg_temp_free_i64(fp1
);
8349 gen_store_fpr64(ctx
, fp2
, fd
);
8350 tcg_temp_free_i64(fp2
);
8357 TCGv_i32 fp0
= tcg_temp_new_i32();
8358 TCGv_i32 fp1
= tcg_temp_new_i32();
8359 TCGv_i32 fp2
= tcg_temp_new_i32();
8361 gen_load_fpr32(fp0
, fs
);
8362 gen_load_fpr32(fp1
, ft
);
8363 gen_load_fpr32(fp2
, fr
);
8364 gen_helper_float_mulsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8365 tcg_temp_free_i32(fp0
);
8366 tcg_temp_free_i32(fp1
);
8367 gen_store_fpr32(fp2
, fd
);
8368 tcg_temp_free_i32(fp2
);
8374 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8376 TCGv_i64 fp0
= tcg_temp_new_i64();
8377 TCGv_i64 fp1
= tcg_temp_new_i64();
8378 TCGv_i64 fp2
= tcg_temp_new_i64();
8380 gen_load_fpr64(ctx
, fp0
, fs
);
8381 gen_load_fpr64(ctx
, fp1
, ft
);
8382 gen_load_fpr64(ctx
, fp2
, fr
);
8383 gen_helper_float_mulsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8384 tcg_temp_free_i64(fp0
);
8385 tcg_temp_free_i64(fp1
);
8386 gen_store_fpr64(ctx
, fp2
, fd
);
8387 tcg_temp_free_i64(fp2
);
8392 check_cp1_64bitmode(ctx
);
8394 TCGv_i64 fp0
= tcg_temp_new_i64();
8395 TCGv_i64 fp1
= tcg_temp_new_i64();
8396 TCGv_i64 fp2
= tcg_temp_new_i64();
8398 gen_load_fpr64(ctx
, fp0
, fs
);
8399 gen_load_fpr64(ctx
, fp1
, ft
);
8400 gen_load_fpr64(ctx
, fp2
, fr
);
8401 gen_helper_float_mulsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8402 tcg_temp_free_i64(fp0
);
8403 tcg_temp_free_i64(fp1
);
8404 gen_store_fpr64(ctx
, fp2
, fd
);
8405 tcg_temp_free_i64(fp2
);
8412 TCGv_i32 fp0
= tcg_temp_new_i32();
8413 TCGv_i32 fp1
= tcg_temp_new_i32();
8414 TCGv_i32 fp2
= tcg_temp_new_i32();
8416 gen_load_fpr32(fp0
, fs
);
8417 gen_load_fpr32(fp1
, ft
);
8418 gen_load_fpr32(fp2
, fr
);
8419 gen_helper_float_nmuladd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8420 tcg_temp_free_i32(fp0
);
8421 tcg_temp_free_i32(fp1
);
8422 gen_store_fpr32(fp2
, fd
);
8423 tcg_temp_free_i32(fp2
);
8429 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8431 TCGv_i64 fp0
= tcg_temp_new_i64();
8432 TCGv_i64 fp1
= tcg_temp_new_i64();
8433 TCGv_i64 fp2
= tcg_temp_new_i64();
8435 gen_load_fpr64(ctx
, fp0
, fs
);
8436 gen_load_fpr64(ctx
, fp1
, ft
);
8437 gen_load_fpr64(ctx
, fp2
, fr
);
8438 gen_helper_float_nmuladd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8439 tcg_temp_free_i64(fp0
);
8440 tcg_temp_free_i64(fp1
);
8441 gen_store_fpr64(ctx
, fp2
, fd
);
8442 tcg_temp_free_i64(fp2
);
8447 check_cp1_64bitmode(ctx
);
8449 TCGv_i64 fp0
= tcg_temp_new_i64();
8450 TCGv_i64 fp1
= tcg_temp_new_i64();
8451 TCGv_i64 fp2
= tcg_temp_new_i64();
8453 gen_load_fpr64(ctx
, fp0
, fs
);
8454 gen_load_fpr64(ctx
, fp1
, ft
);
8455 gen_load_fpr64(ctx
, fp2
, fr
);
8456 gen_helper_float_nmuladd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8457 tcg_temp_free_i64(fp0
);
8458 tcg_temp_free_i64(fp1
);
8459 gen_store_fpr64(ctx
, fp2
, fd
);
8460 tcg_temp_free_i64(fp2
);
8467 TCGv_i32 fp0
= tcg_temp_new_i32();
8468 TCGv_i32 fp1
= tcg_temp_new_i32();
8469 TCGv_i32 fp2
= tcg_temp_new_i32();
8471 gen_load_fpr32(fp0
, fs
);
8472 gen_load_fpr32(fp1
, ft
);
8473 gen_load_fpr32(fp2
, fr
);
8474 gen_helper_float_nmulsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8475 tcg_temp_free_i32(fp0
);
8476 tcg_temp_free_i32(fp1
);
8477 gen_store_fpr32(fp2
, fd
);
8478 tcg_temp_free_i32(fp2
);
8484 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8486 TCGv_i64 fp0
= tcg_temp_new_i64();
8487 TCGv_i64 fp1
= tcg_temp_new_i64();
8488 TCGv_i64 fp2
= tcg_temp_new_i64();
8490 gen_load_fpr64(ctx
, fp0
, fs
);
8491 gen_load_fpr64(ctx
, fp1
, ft
);
8492 gen_load_fpr64(ctx
, fp2
, fr
);
8493 gen_helper_float_nmulsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8494 tcg_temp_free_i64(fp0
);
8495 tcg_temp_free_i64(fp1
);
8496 gen_store_fpr64(ctx
, fp2
, fd
);
8497 tcg_temp_free_i64(fp2
);
8502 check_cp1_64bitmode(ctx
);
8504 TCGv_i64 fp0
= tcg_temp_new_i64();
8505 TCGv_i64 fp1
= tcg_temp_new_i64();
8506 TCGv_i64 fp2
= tcg_temp_new_i64();
8508 gen_load_fpr64(ctx
, fp0
, fs
);
8509 gen_load_fpr64(ctx
, fp1
, ft
);
8510 gen_load_fpr64(ctx
, fp2
, fr
);
8511 gen_helper_float_nmulsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8512 tcg_temp_free_i64(fp0
);
8513 tcg_temp_free_i64(fp1
);
8514 gen_store_fpr64(ctx
, fp2
, fd
);
8515 tcg_temp_free_i64(fp2
);
8521 generate_exception (ctx
, EXCP_RI
);
8524 (void)opn
; /* avoid a compiler warning */
8525 MIPS_DEBUG("%s %s, %s, %s, %s", opn
, fregnames
[fd
], fregnames
[fr
],
8526 fregnames
[fs
], fregnames
[ft
]);
8530 gen_rdhwr (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
)
8534 #if !defined(CONFIG_USER_ONLY)
8535 /* The Linux kernel will emulate rdhwr if it's not supported natively.
8536 Therefore only check the ISA in system mode. */
8537 check_insn(env
, ctx
, ISA_MIPS32R2
);
8539 t0
= tcg_temp_new();
8543 save_cpu_state(ctx
, 1);
8544 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
8545 gen_store_gpr(t0
, rt
);
8548 save_cpu_state(ctx
, 1);
8549 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
8550 gen_store_gpr(t0
, rt
);
8553 save_cpu_state(ctx
, 1);
8554 gen_helper_rdhwr_cc(t0
, cpu_env
);
8555 gen_store_gpr(t0
, rt
);
8558 save_cpu_state(ctx
, 1);
8559 gen_helper_rdhwr_ccres(t0
, cpu_env
);
8560 gen_store_gpr(t0
, rt
);
8563 #if defined(CONFIG_USER_ONLY)
8564 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUMIPSState
, tls_value
));
8565 gen_store_gpr(t0
, rt
);
8568 /* XXX: Some CPUs implement this in hardware.
8569 Not supported yet. */
8571 default: /* Invalid */
8572 MIPS_INVAL("rdhwr");
8573 generate_exception(ctx
, EXCP_RI
);
8579 static void handle_delay_slot (CPUMIPSState
*env
, DisasContext
*ctx
,
8582 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8583 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
8584 /* Branches completion */
8585 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
8586 ctx
->bstate
= BS_BRANCH
;
8587 save_cpu_state(ctx
, 0);
8588 /* FIXME: Need to clear can_do_io. */
8589 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
8591 /* unconditional branch */
8592 MIPS_DEBUG("unconditional branch");
8593 if (proc_hflags
& MIPS_HFLAG_BX
) {
8594 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
8596 gen_goto_tb(ctx
, 0, ctx
->btarget
);
8599 /* blikely taken case */
8600 MIPS_DEBUG("blikely branch taken");
8601 gen_goto_tb(ctx
, 0, ctx
->btarget
);
8604 /* Conditional branch */
8605 MIPS_DEBUG("conditional branch");
8607 int l1
= gen_new_label();
8609 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
8610 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
8612 gen_goto_tb(ctx
, 0, ctx
->btarget
);
8616 /* unconditional branch to register */
8617 MIPS_DEBUG("branch to register");
8618 if (env
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
8619 TCGv t0
= tcg_temp_new();
8620 TCGv_i32 t1
= tcg_temp_new_i32();
8622 tcg_gen_andi_tl(t0
, btarget
, 0x1);
8623 tcg_gen_trunc_tl_i32(t1
, t0
);
8625 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
8626 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
8627 tcg_gen_or_i32(hflags
, hflags
, t1
);
8628 tcg_temp_free_i32(t1
);
8630 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
8632 tcg_gen_mov_tl(cpu_PC
, btarget
);
8634 if (ctx
->singlestep_enabled
) {
8635 save_cpu_state(ctx
, 0);
8636 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
8641 MIPS_DEBUG("unknown branch");
8647 /* ISA extensions (ASEs) */
8648 /* MIPS16 extension to MIPS32 */
8650 /* MIPS16 major opcodes */
8652 M16_OPC_ADDIUSP
= 0x00,
8653 M16_OPC_ADDIUPC
= 0x01,
8656 M16_OPC_BEQZ
= 0x04,
8657 M16_OPC_BNEQZ
= 0x05,
8658 M16_OPC_SHIFT
= 0x06,
8660 M16_OPC_RRIA
= 0x08,
8661 M16_OPC_ADDIU8
= 0x09,
8662 M16_OPC_SLTI
= 0x0a,
8663 M16_OPC_SLTIU
= 0x0b,
8666 M16_OPC_CMPI
= 0x0e,
8670 M16_OPC_LWSP
= 0x12,
8674 M16_OPC_LWPC
= 0x16,
8678 M16_OPC_SWSP
= 0x1a,
8682 M16_OPC_EXTEND
= 0x1e,
8686 /* I8 funct field */
8705 /* RR funct field */
8739 /* I64 funct field */
8751 /* RR ry field for CNVT */
8753 RR_RY_CNVT_ZEB
= 0x0,
8754 RR_RY_CNVT_ZEH
= 0x1,
8755 RR_RY_CNVT_ZEW
= 0x2,
8756 RR_RY_CNVT_SEB
= 0x4,
8757 RR_RY_CNVT_SEH
= 0x5,
8758 RR_RY_CNVT_SEW
= 0x6,
/* Map a 3-bit MIPS16 register field onto the full 5-bit GPR number
   (MIPS16 can only directly name $16, $17 and $2..$7). */
static int xlat (int r)
{
    /* const-qualified for consistency with the microMIPS mmreg() table. */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
8768 static void gen_mips16_save (DisasContext
*ctx
,
8769 int xsregs
, int aregs
,
8770 int do_ra
, int do_s0
, int do_s1
,
8773 TCGv t0
= tcg_temp_new();
8774 TCGv t1
= tcg_temp_new();
8804 generate_exception(ctx
, EXCP_RI
);
8810 gen_base_offset_addr(ctx
, t0
, 29, 12);
8811 gen_load_gpr(t1
, 7);
8812 op_st_sw(t1
, t0
, ctx
);
8815 gen_base_offset_addr(ctx
, t0
, 29, 8);
8816 gen_load_gpr(t1
, 6);
8817 op_st_sw(t1
, t0
, ctx
);
8820 gen_base_offset_addr(ctx
, t0
, 29, 4);
8821 gen_load_gpr(t1
, 5);
8822 op_st_sw(t1
, t0
, ctx
);
8825 gen_base_offset_addr(ctx
, t0
, 29, 0);
8826 gen_load_gpr(t1
, 4);
8827 op_st_sw(t1
, t0
, ctx
);
8830 gen_load_gpr(t0
, 29);
8832 #define DECR_AND_STORE(reg) do { \
8833 tcg_gen_subi_tl(t0, t0, 4); \
8834 gen_load_gpr(t1, reg); \
8835 op_st_sw(t1, t0, ctx); \
8899 generate_exception(ctx
, EXCP_RI
);
8915 #undef DECR_AND_STORE
8917 tcg_gen_subi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
8922 static void gen_mips16_restore (DisasContext
*ctx
,
8923 int xsregs
, int aregs
,
8924 int do_ra
, int do_s0
, int do_s1
,
8928 TCGv t0
= tcg_temp_new();
8929 TCGv t1
= tcg_temp_new();
8931 tcg_gen_addi_tl(t0
, cpu_gpr
[29], framesize
);
8933 #define DECR_AND_LOAD(reg) do { \
8934 tcg_gen_subi_tl(t0, t0, 4); \
8935 op_ld_lw(t1, t0, ctx); \
8936 gen_store_gpr(t1, reg); \
9000 generate_exception(ctx
, EXCP_RI
);
9016 #undef DECR_AND_LOAD
9018 tcg_gen_addi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
9023 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
9024 int is_64_bit
, int extended
)
9028 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9029 generate_exception(ctx
, EXCP_RI
);
9033 t0
= tcg_temp_new();
9035 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
9036 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
9038 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9044 #if defined(TARGET_MIPS64)
9045 static void decode_i64_mips16 (CPUMIPSState
*env
, DisasContext
*ctx
,
9046 int ry
, int funct
, int16_t offset
,
9052 offset
= extended
? offset
: offset
<< 3;
9053 gen_ld(env
, ctx
, OPC_LD
, ry
, 29, offset
);
9057 offset
= extended
? offset
: offset
<< 3;
9058 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
9062 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
9063 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
9067 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
9068 gen_arith_imm(env
, ctx
, OPC_DADDIU
, 29, 29, offset
);
9071 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9072 generate_exception(ctx
, EXCP_RI
);
9074 offset
= extended
? offset
: offset
<< 3;
9075 gen_ld(env
, ctx
, OPC_LDPC
, ry
, 0, offset
);
9080 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
9081 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, ry
, offset
);
9085 offset
= extended
? offset
: offset
<< 2;
9086 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
9090 offset
= extended
? offset
: offset
<< 2;
9091 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, 29, offset
);
9097 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
,
9100 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
9101 int op
, rx
, ry
, funct
, sa
;
9102 int16_t imm
, offset
;
9104 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
9105 op
= (ctx
->opcode
>> 11) & 0x1f;
9106 sa
= (ctx
->opcode
>> 22) & 0x1f;
9107 funct
= (ctx
->opcode
>> 8) & 0x7;
9108 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
9109 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
9110 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
9111 | ((ctx
->opcode
>> 21) & 0x3f) << 5
9112 | (ctx
->opcode
& 0x1f));
9114 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
9117 case M16_OPC_ADDIUSP
:
9118 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
9120 case M16_OPC_ADDIUPC
:
9121 gen_addiupc(ctx
, rx
, imm
, 0, 1);
9124 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1);
9125 /* No delay slot, so just process as a normal instruction */
9128 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1);
9129 /* No delay slot, so just process as a normal instruction */
9132 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1);
9133 /* No delay slot, so just process as a normal instruction */
9136 switch (ctx
->opcode
& 0x3) {
9138 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
9141 #if defined(TARGET_MIPS64)
9143 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
9145 generate_exception(ctx
, EXCP_RI
);
9149 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
9152 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
9156 #if defined(TARGET_MIPS64)
9159 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
);
9163 imm
= ctx
->opcode
& 0xf;
9164 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
9165 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
9166 imm
= (int16_t) (imm
<< 1) >> 1;
9167 if ((ctx
->opcode
>> 4) & 0x1) {
9168 #if defined(TARGET_MIPS64)
9170 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
9172 generate_exception(ctx
, EXCP_RI
);
9175 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
9178 case M16_OPC_ADDIU8
:
9179 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
9182 gen_slt_imm(env
, ctx
, OPC_SLTI
, 24, rx
, imm
);
9185 gen_slt_imm(env
, ctx
, OPC_SLTIU
, 24, rx
, imm
);
9190 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1);
9193 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1);
9196 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
9199 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
);
9203 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
9204 int aregs
= (ctx
->opcode
>> 16) & 0xf;
9205 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
9206 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
9207 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
9208 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
9209 | (ctx
->opcode
& 0xf)) << 3;
9211 if (ctx
->opcode
& (1 << 7)) {
9212 gen_mips16_save(ctx
, xsregs
, aregs
,
9213 do_ra
, do_s0
, do_s1
,
9216 gen_mips16_restore(ctx
, xsregs
, aregs
,
9217 do_ra
, do_s0
, do_s1
,
9223 generate_exception(ctx
, EXCP_RI
);
9228 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
9231 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
9233 #if defined(TARGET_MIPS64)
9235 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
9239 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
9242 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
);
9245 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, offset
);
9248 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
);
9251 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
9254 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
);
9257 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, offset
);
9259 #if defined(TARGET_MIPS64)
9261 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
);
9265 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
9268 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
9271 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
9274 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
9276 #if defined(TARGET_MIPS64)
9278 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 1);
9282 generate_exception(ctx
, EXCP_RI
);
9289 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
,
9294 int op
, cnvt_op
, op1
, offset
;
9298 op
= (ctx
->opcode
>> 11) & 0x1f;
9299 sa
= (ctx
->opcode
>> 2) & 0x7;
9300 sa
= sa
== 0 ? 8 : sa
;
9301 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
9302 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
9303 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
9304 op1
= offset
= ctx
->opcode
& 0x1f;
9309 case M16_OPC_ADDIUSP
:
9311 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
9313 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
9316 case M16_OPC_ADDIUPC
:
9317 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
9320 offset
= (ctx
->opcode
& 0x7ff) << 1;
9321 offset
= (int16_t)(offset
<< 4) >> 4;
9322 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
);
9323 /* No delay slot, so just process as a normal instruction */
9326 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
9327 offset
= (((ctx
->opcode
& 0x1f) << 21)
9328 | ((ctx
->opcode
>> 5) & 0x1f) << 16
9330 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALXS
: OPC_JALS
;
9331 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
);
9336 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
9337 /* No delay slot, so just process as a normal instruction */
9340 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
9341 /* No delay slot, so just process as a normal instruction */
9344 switch (ctx
->opcode
& 0x3) {
9346 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
9349 #if defined(TARGET_MIPS64)
9351 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
9353 generate_exception(ctx
, EXCP_RI
);
9357 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
9360 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
9364 #if defined(TARGET_MIPS64)
9367 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
9372 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
9374 if ((ctx
->opcode
>> 4) & 1) {
9375 #if defined(TARGET_MIPS64)
9377 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
9379 generate_exception(ctx
, EXCP_RI
);
9382 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
9386 case M16_OPC_ADDIU8
:
9388 int16_t imm
= (int8_t) ctx
->opcode
;
9390 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
9395 int16_t imm
= (uint8_t) ctx
->opcode
;
9396 gen_slt_imm(env
, ctx
, OPC_SLTI
, 24, rx
, imm
);
9401 int16_t imm
= (uint8_t) ctx
->opcode
;
9402 gen_slt_imm(env
, ctx
, OPC_SLTIU
, 24, rx
, imm
);
9409 funct
= (ctx
->opcode
>> 8) & 0x7;
9412 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
9413 ((int8_t)ctx
->opcode
) << 1);
9416 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
9417 ((int8_t)ctx
->opcode
) << 1);
9420 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
9423 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29,
9424 ((int8_t)ctx
->opcode
) << 3);
9428 int do_ra
= ctx
->opcode
& (1 << 6);
9429 int do_s0
= ctx
->opcode
& (1 << 5);
9430 int do_s1
= ctx
->opcode
& (1 << 4);
9431 int framesize
= ctx
->opcode
& 0xf;
9433 if (framesize
== 0) {
9436 framesize
= framesize
<< 3;
9439 if (ctx
->opcode
& (1 << 7)) {
9440 gen_mips16_save(ctx
, 0, 0,
9441 do_ra
, do_s0
, do_s1
, framesize
);
9443 gen_mips16_restore(ctx
, 0, 0,
9444 do_ra
, do_s0
, do_s1
, framesize
);
9450 int rz
= xlat(ctx
->opcode
& 0x7);
9452 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
9453 ((ctx
->opcode
>> 5) & 0x7);
9454 gen_arith(env
, ctx
, OPC_ADDU
, reg32
, rz
, 0);
9458 reg32
= ctx
->opcode
& 0x1f;
9459 gen_arith(env
, ctx
, OPC_ADDU
, ry
, reg32
, 0);
9462 generate_exception(ctx
, EXCP_RI
);
9469 int16_t imm
= (uint8_t) ctx
->opcode
;
9471 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 0, imm
);
9476 int16_t imm
= (uint8_t) ctx
->opcode
;
9477 gen_logic_imm(env
, ctx
, OPC_XORI
, 24, rx
, imm
);
9480 #if defined(TARGET_MIPS64)
9483 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
9487 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
9490 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
9493 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9496 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
9499 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
9502 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
9505 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
9507 #if defined (TARGET_MIPS64)
9510 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
9514 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
9517 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
9520 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9523 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
9527 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
9530 switch (ctx
->opcode
& 0x3) {
9532 mips32_op
= OPC_ADDU
;
9535 mips32_op
= OPC_SUBU
;
9537 #if defined(TARGET_MIPS64)
9539 mips32_op
= OPC_DADDU
;
9543 mips32_op
= OPC_DSUBU
;
9548 generate_exception(ctx
, EXCP_RI
);
9552 gen_arith(env
, ctx
, mips32_op
, rz
, rx
, ry
);
9561 int nd
= (ctx
->opcode
>> 7) & 0x1;
9562 int link
= (ctx
->opcode
>> 6) & 0x1;
9563 int ra
= (ctx
->opcode
>> 5) & 0x1;
9566 op
= nd
? OPC_JALRC
: OPC_JALRS
;
9571 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0);
9578 /* XXX: not clear which exception should be raised
9579 * when in debug mode...
9581 check_insn(env
, ctx
, ISA_MIPS32
);
9582 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9583 generate_exception(ctx
, EXCP_DBp
);
9585 generate_exception(ctx
, EXCP_DBp
);
9589 gen_slt(env
, ctx
, OPC_SLT
, 24, rx
, ry
);
9592 gen_slt(env
, ctx
, OPC_SLTU
, 24, rx
, ry
);
9595 generate_exception(ctx
, EXCP_BREAK
);
9598 gen_shift(env
, ctx
, OPC_SLLV
, ry
, rx
, ry
);
9601 gen_shift(env
, ctx
, OPC_SRLV
, ry
, rx
, ry
);
9604 gen_shift(env
, ctx
, OPC_SRAV
, ry
, rx
, ry
);
9606 #if defined (TARGET_MIPS64)
9609 gen_shift_imm(env
, ctx
, OPC_DSRL
, ry
, ry
, sa
);
9613 gen_logic(env
, ctx
, OPC_XOR
, 24, rx
, ry
);
9616 gen_arith(env
, ctx
, OPC_SUBU
, rx
, 0, ry
);
9619 gen_logic(env
, ctx
, OPC_AND
, rx
, rx
, ry
);
9622 gen_logic(env
, ctx
, OPC_OR
, rx
, rx
, ry
);
9625 gen_logic(env
, ctx
, OPC_XOR
, rx
, rx
, ry
);
9628 gen_logic(env
, ctx
, OPC_NOR
, rx
, ry
, 0);
9631 gen_HILO(ctx
, OPC_MFHI
, rx
);
9635 case RR_RY_CNVT_ZEB
:
9636 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9638 case RR_RY_CNVT_ZEH
:
9639 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9641 case RR_RY_CNVT_SEB
:
9642 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9644 case RR_RY_CNVT_SEH
:
9645 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9647 #if defined (TARGET_MIPS64)
9648 case RR_RY_CNVT_ZEW
:
9650 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9652 case RR_RY_CNVT_SEW
:
9654 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9658 generate_exception(ctx
, EXCP_RI
);
9663 gen_HILO(ctx
, OPC_MFLO
, rx
);
9665 #if defined (TARGET_MIPS64)
9668 gen_shift_imm(env
, ctx
, OPC_DSRA
, ry
, ry
, sa
);
9672 gen_shift(env
, ctx
, OPC_DSLLV
, ry
, rx
, ry
);
9676 gen_shift(env
, ctx
, OPC_DSRLV
, ry
, rx
, ry
);
9680 gen_shift(env
, ctx
, OPC_DSRAV
, ry
, rx
, ry
);
9684 gen_muldiv(ctx
, OPC_MULT
, rx
, ry
);
9687 gen_muldiv(ctx
, OPC_MULTU
, rx
, ry
);
9690 gen_muldiv(ctx
, OPC_DIV
, rx
, ry
);
9693 gen_muldiv(ctx
, OPC_DIVU
, rx
, ry
);
9695 #if defined (TARGET_MIPS64)
9698 gen_muldiv(ctx
, OPC_DMULT
, rx
, ry
);
9702 gen_muldiv(ctx
, OPC_DMULTU
, rx
, ry
);
9706 gen_muldiv(ctx
, OPC_DDIV
, rx
, ry
);
9710 gen_muldiv(ctx
, OPC_DDIVU
, rx
, ry
);
9714 generate_exception(ctx
, EXCP_RI
);
9718 case M16_OPC_EXTEND
:
9719 decode_extended_mips16_opc(env
, ctx
, is_branch
);
9722 #if defined(TARGET_MIPS64)
9724 funct
= (ctx
->opcode
>> 8) & 0x7;
9725 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 0);
9729 generate_exception(ctx
, EXCP_RI
);
9736 /* microMIPS extension to MIPS32 */
9738 /* microMIPS32 major opcodes */
9777 /* 0x20 is reserved */
9787 /* 0x28 and 0x29 are reserved */
9797 /* 0x30 and 0x31 are reserved */
9807 /* 0x38 and 0x39 are reserved */
9818 /* POOL32A encoding of minor opcode field */
9821 /* These opcodes are distinguished only by bits 9..6; those bits are
9822 * what are recorded below. */
9848 /* The following can be distinguished by their lower 6 bits. */
9854 /* POOL32AXF encoding of minor opcode field extension */
9868 /* bits 13..12 for 0x01 */
9874 /* bits 13..12 for 0x2a */
9880 /* bits 13..12 for 0x32 */
9884 /* bits 15..12 for 0x2c */
9900 /* bits 15..12 for 0x34 */
9908 /* bits 15..12 for 0x3c */
9910 JR
= 0x0, /* alias */
9915 /* bits 15..12 for 0x05 */
9919 /* bits 15..12 for 0x0d */
9929 /* bits 15..12 for 0x15 */
9935 /* bits 15..12 for 0x1d */
9939 /* bits 15..12 for 0x2d */
9944 /* bits 15..12 for 0x35 */
9951 /* POOL32B encoding of minor opcode field (bits 15..12) */
9967 /* POOL32C encoding of minor opcode field (bits 15..12) */
9975 /* 0xa is reserved */
9982 /* 0x6 is reserved */
9988 /* POOL32F encoding of minor opcode field (bits 5..0) */
9991 /* These are the bit 7..6 values */
10002 /* These are the bit 8..6 values */
10046 CABS_COND_FMT
= 0x1c, /* MIPS3D */
10050 /* POOL32Fxf encoding of minor opcode extension field */
10088 /* POOL32I encoding of minor opcode field (bits 25..21) */
10113 /* These overlap and are distinguished by bit16 of the instruction */
10122 /* POOL16A encoding of minor opcode field */
10129 /* POOL16B encoding of minor opcode field */
10136 /* POOL16C encoding of minor opcode field */
10156 /* POOL16D encoding of minor opcode field */
10163 /* POOL16E encoding of minor opcode field */
/* Translate a 3-bit microMIPS register encoding to the architectural
   GPR number ($16, $17 and $2..$7 are the encodable registers). */
static int mmreg (int r)
{
    static const int mm_gpr[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return mm_gpr[r];
}
/* Register-field decode used by 16-bit store instructions: identical to
   mmreg() except that encoding 0 selects $0 instead of $16. */
static int mmreg2 (int r)
{
    static const int mm_gpr2[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return mm_gpr2[r];
}
/* Register-field extractors for 16-bit microMIPS instruction words.
   Arguments are now fully parenthesized so callers may safely pass
   arbitrary expressions (macro-hygiene fix; no behavior change for
   existing plain-identifier callers). */
#define uMIPS_RD(op) (((op) >> 7) & 0x7)
#define uMIPS_RS(op) (((op) >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) (((op) >> 1) & 0x7)
#define uMIPS_RD5(op) (((op) >> 5) & 0x1f)
#define uMIPS_RS5(op) ((op) & 0x1f)
10192 /* Signed immediate */
10193 #define SIMM(op, start, width) \
10194 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
/* Zero-extended immediate: extract `width` bits starting at bit `start`.
   Arguments are fully parenthesized (macro-hygiene fix).  Note: requires
   1 <= width <= 32; a zero width would shift by 32, which is undefined. */
#define ZIMM(op, start, width) (((op) >> (start)) & ((~0U) >> (32 - (width))))
10200 static void gen_addiur1sp (CPUMIPSState
*env
, DisasContext
*ctx
)
10202 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
10204 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
10207 static void gen_addiur2 (CPUMIPSState
*env
, DisasContext
*ctx
)
10209 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
10210 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
10211 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
10213 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
10216 static void gen_addiusp (CPUMIPSState
*env
, DisasContext
*ctx
)
10218 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
10221 if (encoded
<= 1) {
10222 decoded
= 256 + encoded
;
10223 } else if (encoded
<= 255) {
10225 } else if (encoded
<= 509) {
10226 decoded
= encoded
- 512;
10228 decoded
= encoded
- 768;
10231 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
10234 static void gen_addius5 (CPUMIPSState
*env
, DisasContext
*ctx
)
10236 int imm
= SIMM(ctx
->opcode
, 1, 4);
10237 int rd
= (ctx
->opcode
>> 5) & 0x1f;
10239 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rd
, imm
);
10242 static void gen_andi16 (CPUMIPSState
*env
, DisasContext
*ctx
)
10244 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
10245 31, 32, 63, 64, 255, 32768, 65535 };
10246 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
10247 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
10248 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
10250 gen_logic_imm(env
, ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
10253 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
10254 int base
, int16_t offset
)
10256 const char *opn
= "ldst_multiple";
10260 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10261 generate_exception(ctx
, EXCP_RI
);
10265 t0
= tcg_temp_new();
10267 gen_base_offset_addr(ctx
, t0
, base
, offset
);
10269 t1
= tcg_const_tl(reglist
);
10270 t2
= tcg_const_i32(ctx
->mem_idx
);
10272 save_cpu_state(ctx
, 1);
10275 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
10279 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
10282 #ifdef TARGET_MIPS64
10284 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
10288 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
10294 MIPS_DEBUG("%s, %x, %d(%s)", opn
, reglist
, offset
, regnames
[base
]);
10297 tcg_temp_free_i32(t2
);
10301 static void gen_pool16c_insn (CPUMIPSState
*env
, DisasContext
*ctx
, int *is_branch
)
10303 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
10304 int rs
= mmreg(ctx
->opcode
& 0x7);
10307 switch (((ctx
->opcode
) >> 4) & 0x3f) {
10312 gen_logic(env
, ctx
, OPC_NOR
, rd
, rs
, 0);
10318 gen_logic(env
, ctx
, OPC_XOR
, rd
, rd
, rs
);
10324 gen_logic(env
, ctx
, OPC_AND
, rd
, rd
, rs
);
10330 gen_logic(env
, ctx
, OPC_OR
, rd
, rd
, rs
);
10337 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
10338 int offset
= ZIMM(ctx
->opcode
, 0, 4);
10340 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
10349 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
10350 int offset
= ZIMM(ctx
->opcode
, 0, 4);
10352 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
10359 int reg
= ctx
->opcode
& 0x1f;
10361 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0);
10368 int reg
= ctx
->opcode
& 0x1f;
10370 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0);
10371 /* Let normal delay slot handling in our caller take us
10372 to the branch target. */
10384 int reg
= ctx
->opcode
& 0x1f;
10386 gen_compute_branch(ctx
, opc
, 2, reg
, 31, 0);
10392 gen_HILO(ctx
, OPC_MFHI
, uMIPS_RS5(ctx
->opcode
));
10396 gen_HILO(ctx
, OPC_MFLO
, uMIPS_RS5(ctx
->opcode
));
10399 generate_exception(ctx
, EXCP_BREAK
);
10402 /* XXX: not clear which exception should be raised
10403 * when in debug mode...
10405 check_insn(env
, ctx
, ISA_MIPS32
);
10406 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10407 generate_exception(ctx
, EXCP_DBp
);
10409 generate_exception(ctx
, EXCP_DBp
);
10412 case JRADDIUSP
+ 0:
10413 case JRADDIUSP
+ 1:
10415 int imm
= ZIMM(ctx
->opcode
, 0, 5);
10417 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0);
10418 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
10419 /* Let normal delay slot handling in our caller take us
10420 to the branch target. */
10424 generate_exception(ctx
, EXCP_RI
);
10429 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
10431 TCGv t0
= tcg_temp_new();
10432 TCGv t1
= tcg_temp_new();
10434 gen_load_gpr(t0
, base
);
10437 gen_load_gpr(t1
, index
);
10438 tcg_gen_shli_tl(t1
, t1
, 2);
10439 gen_op_addr_add(ctx
, t0
, t1
, t0
);
10442 save_cpu_state(ctx
, 0);
10443 op_ld_lw(t1
, t0
, ctx
);
10444 gen_store_gpr(t1
, rd
);
10450 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
10451 int base
, int16_t offset
)
10453 const char *opn
= "ldst_pair";
10456 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
10457 generate_exception(ctx
, EXCP_RI
);
10461 t0
= tcg_temp_new();
10462 t1
= tcg_temp_new();
10464 gen_base_offset_addr(ctx
, t0
, base
, offset
);
10469 generate_exception(ctx
, EXCP_RI
);
10472 save_cpu_state(ctx
, 0);
10473 op_ld_lw(t1
, t0
, ctx
);
10474 gen_store_gpr(t1
, rd
);
10475 tcg_gen_movi_tl(t1
, 4);
10476 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10477 op_ld_lw(t1
, t0
, ctx
);
10478 gen_store_gpr(t1
, rd
+1);
10482 save_cpu_state(ctx
, 0);
10483 gen_load_gpr(t1
, rd
);
10484 op_st_sw(t1
, t0
, ctx
);
10485 tcg_gen_movi_tl(t1
, 4);
10486 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10487 gen_load_gpr(t1
, rd
+1);
10488 op_st_sw(t1
, t0
, ctx
);
10491 #ifdef TARGET_MIPS64
10494 generate_exception(ctx
, EXCP_RI
);
10497 save_cpu_state(ctx
, 0);
10498 op_ld_ld(t1
, t0
, ctx
);
10499 gen_store_gpr(t1
, rd
);
10500 tcg_gen_movi_tl(t1
, 8);
10501 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10502 op_ld_ld(t1
, t0
, ctx
);
10503 gen_store_gpr(t1
, rd
+1);
10507 save_cpu_state(ctx
, 0);
10508 gen_load_gpr(t1
, rd
);
10509 op_st_sd(t1
, t0
, ctx
);
10510 tcg_gen_movi_tl(t1
, 8);
10511 gen_op_addr_add(ctx
, t0
, t0
, t1
);
10512 gen_load_gpr(t1
, rd
+1);
10513 op_st_sd(t1
, t0
, ctx
);
10518 (void)opn
; /* avoid a compiler warning */
10519 MIPS_DEBUG("%s, %s, %d(%s)", opn
, regnames
[rd
], offset
, regnames
[base
]);
10524 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
,
10527 int extension
= (ctx
->opcode
>> 6) & 0x3f;
10528 int minor
= (ctx
->opcode
>> 12) & 0xf;
10529 uint32_t mips32_op
;
10531 switch (extension
) {
10533 mips32_op
= OPC_TEQ
;
10536 mips32_op
= OPC_TGE
;
10539 mips32_op
= OPC_TGEU
;
10542 mips32_op
= OPC_TLT
;
10545 mips32_op
= OPC_TLTU
;
10548 mips32_op
= OPC_TNE
;
10550 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
10552 #ifndef CONFIG_USER_ONLY
10555 check_cp0_enabled(ctx
);
10557 /* Treat as NOP. */
10560 gen_mfc0(env
, ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
10564 check_cp0_enabled(ctx
);
10566 TCGv t0
= tcg_temp_new();
10568 gen_load_gpr(t0
, rt
);
10569 gen_mtc0(env
, ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
10577 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
10580 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
10583 mips32_op
= OPC_CLO
;
10586 mips32_op
= OPC_CLZ
;
10588 check_insn(env
, ctx
, ISA_MIPS32
);
10589 gen_cl(ctx
, mips32_op
, rt
, rs
);
10592 gen_rdhwr(env
, ctx
, rt
, rs
);
10595 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
10598 mips32_op
= OPC_MULT
;
10601 mips32_op
= OPC_MULTU
;
10604 mips32_op
= OPC_DIV
;
10607 mips32_op
= OPC_DIVU
;
10610 mips32_op
= OPC_MADD
;
10613 mips32_op
= OPC_MADDU
;
10616 mips32_op
= OPC_MSUB
;
10619 mips32_op
= OPC_MSUBU
;
10621 check_insn(env
, ctx
, ISA_MIPS32
);
10622 gen_muldiv(ctx
, mips32_op
, rs
, rt
);
10625 goto pool32axf_invalid
;
10636 generate_exception_err(ctx
, EXCP_CpU
, 2);
10639 goto pool32axf_invalid
;
10646 gen_compute_branch (ctx
, OPC_JALR
, 4, rs
, rt
, 0);
10651 gen_compute_branch (ctx
, OPC_JALRS
, 4, rs
, rt
, 0);
10655 goto pool32axf_invalid
;
10661 check_cp0_enabled(ctx
);
10662 check_insn(env
, ctx
, ISA_MIPS32R2
);
10663 gen_load_srsgpr(rt
, rs
);
10666 check_cp0_enabled(ctx
);
10667 check_insn(env
, ctx
, ISA_MIPS32R2
);
10668 gen_store_srsgpr(rt
, rs
);
10671 goto pool32axf_invalid
;
10674 #ifndef CONFIG_USER_ONLY
10678 mips32_op
= OPC_TLBP
;
10681 mips32_op
= OPC_TLBR
;
10684 mips32_op
= OPC_TLBWI
;
10687 mips32_op
= OPC_TLBWR
;
10690 mips32_op
= OPC_WAIT
;
10693 mips32_op
= OPC_DERET
;
10696 mips32_op
= OPC_ERET
;
10698 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
10701 goto pool32axf_invalid
;
10707 check_cp0_enabled(ctx
);
10709 TCGv t0
= tcg_temp_new();
10711 save_cpu_state(ctx
, 1);
10712 gen_helper_di(t0
, cpu_env
);
10713 gen_store_gpr(t0
, rs
);
10714 /* Stop translation as we may have switched the execution mode */
10715 ctx
->bstate
= BS_STOP
;
10720 check_cp0_enabled(ctx
);
10722 TCGv t0
= tcg_temp_new();
10724 save_cpu_state(ctx
, 1);
10725 gen_helper_ei(t0
, cpu_env
);
10726 gen_store_gpr(t0
, rs
);
10727 /* Stop translation as we may have switched the execution mode */
10728 ctx
->bstate
= BS_STOP
;
10733 goto pool32axf_invalid
;
10743 generate_exception(ctx
, EXCP_SYSCALL
);
10744 ctx
->bstate
= BS_STOP
;
10747 check_insn(env
, ctx
, ISA_MIPS32
);
10748 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10749 generate_exception(ctx
, EXCP_DBp
);
10751 generate_exception(ctx
, EXCP_DBp
);
10755 goto pool32axf_invalid
;
10761 gen_HILO(ctx
, OPC_MFHI
, rs
);
10764 gen_HILO(ctx
, OPC_MFLO
, rs
);
10767 gen_HILO(ctx
, OPC_MTHI
, rs
);
10770 gen_HILO(ctx
, OPC_MTLO
, rs
);
10773 goto pool32axf_invalid
;
10778 MIPS_INVAL("pool32axf");
10779 generate_exception(ctx
, EXCP_RI
);
10784 /* Values for microMIPS fmt field. Variable-width, depending on which
10785 formats the instruction supports. */
10804 static void gen_pool32fxf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
10806 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
10807 uint32_t mips32_op
;
10809 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
10810 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
10811 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
10813 switch (extension
) {
10814 case FLOAT_1BIT_FMT(CFC1
, 0):
10815 mips32_op
= OPC_CFC1
;
10817 case FLOAT_1BIT_FMT(CTC1
, 0):
10818 mips32_op
= OPC_CTC1
;
10820 case FLOAT_1BIT_FMT(MFC1
, 0):
10821 mips32_op
= OPC_MFC1
;
10823 case FLOAT_1BIT_FMT(MTC1
, 0):
10824 mips32_op
= OPC_MTC1
;
10826 case FLOAT_1BIT_FMT(MFHC1
, 0):
10827 mips32_op
= OPC_MFHC1
;
10829 case FLOAT_1BIT_FMT(MTHC1
, 0):
10830 mips32_op
= OPC_MTHC1
;
10832 gen_cp1(ctx
, mips32_op
, rt
, rs
);
10835 /* Reciprocal square root */
10836 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
10837 mips32_op
= OPC_RSQRT_S
;
10839 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
10840 mips32_op
= OPC_RSQRT_D
;
10844 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
10845 mips32_op
= OPC_SQRT_S
;
10847 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
10848 mips32_op
= OPC_SQRT_D
;
10852 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
10853 mips32_op
= OPC_RECIP_S
;
10855 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
10856 mips32_op
= OPC_RECIP_D
;
10860 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
10861 mips32_op
= OPC_FLOOR_L_S
;
10863 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
10864 mips32_op
= OPC_FLOOR_L_D
;
10866 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
10867 mips32_op
= OPC_FLOOR_W_S
;
10869 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
10870 mips32_op
= OPC_FLOOR_W_D
;
10874 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
10875 mips32_op
= OPC_CEIL_L_S
;
10877 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
10878 mips32_op
= OPC_CEIL_L_D
;
10880 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
10881 mips32_op
= OPC_CEIL_W_S
;
10883 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
10884 mips32_op
= OPC_CEIL_W_D
;
10888 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
10889 mips32_op
= OPC_TRUNC_L_S
;
10891 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
10892 mips32_op
= OPC_TRUNC_L_D
;
10894 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
10895 mips32_op
= OPC_TRUNC_W_S
;
10897 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
10898 mips32_op
= OPC_TRUNC_W_D
;
10902 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
10903 mips32_op
= OPC_ROUND_L_S
;
10905 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
10906 mips32_op
= OPC_ROUND_L_D
;
10908 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
10909 mips32_op
= OPC_ROUND_W_S
;
10911 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
10912 mips32_op
= OPC_ROUND_W_D
;
10915 /* Integer to floating-point conversion */
10916 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
10917 mips32_op
= OPC_CVT_L_S
;
10919 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
10920 mips32_op
= OPC_CVT_L_D
;
10922 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
10923 mips32_op
= OPC_CVT_W_S
;
10925 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
10926 mips32_op
= OPC_CVT_W_D
;
10929 /* Paired-foo conversions */
10930 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
10931 mips32_op
= OPC_CVT_S_PL
;
10933 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
10934 mips32_op
= OPC_CVT_S_PU
;
10936 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
10937 mips32_op
= OPC_CVT_PW_PS
;
10939 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
10940 mips32_op
= OPC_CVT_PS_PW
;
10943 /* Floating-point moves */
10944 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
10945 mips32_op
= OPC_MOV_S
;
10947 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
10948 mips32_op
= OPC_MOV_D
;
10950 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
10951 mips32_op
= OPC_MOV_PS
;
10954 /* Absolute value */
10955 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
10956 mips32_op
= OPC_ABS_S
;
10958 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
10959 mips32_op
= OPC_ABS_D
;
10961 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
10962 mips32_op
= OPC_ABS_PS
;
10966 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
10967 mips32_op
= OPC_NEG_S
;
10969 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
10970 mips32_op
= OPC_NEG_D
;
10972 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
10973 mips32_op
= OPC_NEG_PS
;
10976 /* Reciprocal square root step */
10977 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
10978 mips32_op
= OPC_RSQRT1_S
;
10980 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
10981 mips32_op
= OPC_RSQRT1_D
;
10983 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
10984 mips32_op
= OPC_RSQRT1_PS
;
10987 /* Reciprocal step */
10988 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
10989 mips32_op
= OPC_RECIP1_S
;
10991 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
10992 mips32_op
= OPC_RECIP1_S
;
10994 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
10995 mips32_op
= OPC_RECIP1_PS
;
10998 /* Conversions from double */
10999 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
11000 mips32_op
= OPC_CVT_D_S
;
11002 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
11003 mips32_op
= OPC_CVT_D_W
;
11005 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
11006 mips32_op
= OPC_CVT_D_L
;
11009 /* Conversions from single */
11010 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
11011 mips32_op
= OPC_CVT_S_D
;
11013 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
11014 mips32_op
= OPC_CVT_S_W
;
11016 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
11017 mips32_op
= OPC_CVT_S_L
;
11019 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
11022 /* Conditional moves on floating-point codes */
11023 case COND_FLOAT_MOV(MOVT
, 0):
11024 case COND_FLOAT_MOV(MOVT
, 1):
11025 case COND_FLOAT_MOV(MOVT
, 2):
11026 case COND_FLOAT_MOV(MOVT
, 3):
11027 case COND_FLOAT_MOV(MOVT
, 4):
11028 case COND_FLOAT_MOV(MOVT
, 5):
11029 case COND_FLOAT_MOV(MOVT
, 6):
11030 case COND_FLOAT_MOV(MOVT
, 7):
11031 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
11033 case COND_FLOAT_MOV(MOVF
, 0):
11034 case COND_FLOAT_MOV(MOVF
, 1):
11035 case COND_FLOAT_MOV(MOVF
, 2):
11036 case COND_FLOAT_MOV(MOVF
, 3):
11037 case COND_FLOAT_MOV(MOVF
, 4):
11038 case COND_FLOAT_MOV(MOVF
, 5):
11039 case COND_FLOAT_MOV(MOVF
, 6):
11040 case COND_FLOAT_MOV(MOVF
, 7):
11041 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
11044 MIPS_INVAL("pool32fxf");
11045 generate_exception(ctx
, EXCP_RI
);
11050 static void decode_micromips32_opc (CPUMIPSState
*env
, DisasContext
*ctx
,
11051 uint16_t insn_hw1
, int *is_branch
)
11055 int rt
, rs
, rd
, rr
;
11057 uint32_t op
, minor
, mips32_op
;
11058 uint32_t cond
, fmt
, cc
;
11060 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11061 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
11063 rt
= (ctx
->opcode
>> 21) & 0x1f;
11064 rs
= (ctx
->opcode
>> 16) & 0x1f;
11065 rd
= (ctx
->opcode
>> 11) & 0x1f;
11066 rr
= (ctx
->opcode
>> 6) & 0x1f;
11067 imm
= (int16_t) ctx
->opcode
;
11069 op
= (ctx
->opcode
>> 26) & 0x3f;
11072 minor
= ctx
->opcode
& 0x3f;
11075 minor
= (ctx
->opcode
>> 6) & 0xf;
11078 mips32_op
= OPC_SLL
;
11081 mips32_op
= OPC_SRA
;
11084 mips32_op
= OPC_SRL
;
11087 mips32_op
= OPC_ROTR
;
11089 gen_shift_imm(env
, ctx
, mips32_op
, rt
, rs
, rd
);
11092 goto pool32a_invalid
;
11096 minor
= (ctx
->opcode
>> 6) & 0xf;
11100 mips32_op
= OPC_ADD
;
11103 mips32_op
= OPC_ADDU
;
11106 mips32_op
= OPC_SUB
;
11109 mips32_op
= OPC_SUBU
;
11112 mips32_op
= OPC_MUL
;
11114 gen_arith(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11118 mips32_op
= OPC_SLLV
;
11121 mips32_op
= OPC_SRLV
;
11124 mips32_op
= OPC_SRAV
;
11127 mips32_op
= OPC_ROTRV
;
11129 gen_shift(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11131 /* Logical operations */
11133 mips32_op
= OPC_AND
;
11136 mips32_op
= OPC_OR
;
11139 mips32_op
= OPC_NOR
;
11142 mips32_op
= OPC_XOR
;
11144 gen_logic(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11146 /* Set less than */
11148 mips32_op
= OPC_SLT
;
11151 mips32_op
= OPC_SLTU
;
11153 gen_slt(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11156 goto pool32a_invalid
;
11160 minor
= (ctx
->opcode
>> 6) & 0xf;
11162 /* Conditional moves */
11164 mips32_op
= OPC_MOVN
;
11167 mips32_op
= OPC_MOVZ
;
11169 gen_cond_move(env
, ctx
, mips32_op
, rd
, rs
, rt
);
11172 gen_ldxs(ctx
, rs
, rt
, rd
);
11175 goto pool32a_invalid
;
11179 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
11182 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
11185 gen_pool32axf(env
, ctx
, rt
, rs
, is_branch
);
11188 generate_exception(ctx
, EXCP_BREAK
);
11192 MIPS_INVAL("pool32a");
11193 generate_exception(ctx
, EXCP_RI
);
11198 minor
= (ctx
->opcode
>> 12) & 0xf;
11201 check_cp0_enabled(ctx
);
11202 /* Treat as no-op. */
11206 /* COP2: Not implemented. */
11207 generate_exception_err(ctx
, EXCP_CpU
, 2);
11211 #ifdef TARGET_MIPS64
11215 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11219 #ifdef TARGET_MIPS64
11223 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11226 MIPS_INVAL("pool32b");
11227 generate_exception(ctx
, EXCP_RI
);
11232 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
11233 minor
= ctx
->opcode
& 0x3f;
11234 check_cp1_enabled(ctx
);
11237 mips32_op
= OPC_ALNV_PS
;
11240 mips32_op
= OPC_MADD_S
;
11243 mips32_op
= OPC_MADD_D
;
11246 mips32_op
= OPC_MADD_PS
;
11249 mips32_op
= OPC_MSUB_S
;
11252 mips32_op
= OPC_MSUB_D
;
11255 mips32_op
= OPC_MSUB_PS
;
11258 mips32_op
= OPC_NMADD_S
;
11261 mips32_op
= OPC_NMADD_D
;
11264 mips32_op
= OPC_NMADD_PS
;
11267 mips32_op
= OPC_NMSUB_S
;
11270 mips32_op
= OPC_NMSUB_D
;
11273 mips32_op
= OPC_NMSUB_PS
;
11275 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
11277 case CABS_COND_FMT
:
11278 cond
= (ctx
->opcode
>> 6) & 0xf;
11279 cc
= (ctx
->opcode
>> 13) & 0x7;
11280 fmt
= (ctx
->opcode
>> 10) & 0x3;
11283 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
11286 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
11289 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
11292 goto pool32f_invalid
;
11296 cond
= (ctx
->opcode
>> 6) & 0xf;
11297 cc
= (ctx
->opcode
>> 13) & 0x7;
11298 fmt
= (ctx
->opcode
>> 10) & 0x3;
11301 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
11304 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
11307 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
11310 goto pool32f_invalid
;
11314 gen_pool32fxf(env
, ctx
, rt
, rs
);
11318 switch ((ctx
->opcode
>> 6) & 0x7) {
11320 mips32_op
= OPC_PLL_PS
;
11323 mips32_op
= OPC_PLU_PS
;
11326 mips32_op
= OPC_PUL_PS
;
11329 mips32_op
= OPC_PUU_PS
;
11332 mips32_op
= OPC_CVT_PS_S
;
11334 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11337 goto pool32f_invalid
;
11342 switch ((ctx
->opcode
>> 6) & 0x7) {
11344 mips32_op
= OPC_LWXC1
;
11347 mips32_op
= OPC_SWXC1
;
11350 mips32_op
= OPC_LDXC1
;
11353 mips32_op
= OPC_SDXC1
;
11356 mips32_op
= OPC_LUXC1
;
11359 mips32_op
= OPC_SUXC1
;
11361 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
11364 goto pool32f_invalid
;
11369 fmt
= (ctx
->opcode
>> 9) & 0x3;
11370 switch ((ctx
->opcode
>> 6) & 0x7) {
11374 mips32_op
= OPC_RSQRT2_S
;
11377 mips32_op
= OPC_RSQRT2_D
;
11380 mips32_op
= OPC_RSQRT2_PS
;
11383 goto pool32f_invalid
;
11389 mips32_op
= OPC_RECIP2_S
;
11392 mips32_op
= OPC_RECIP2_D
;
11395 mips32_op
= OPC_RECIP2_PS
;
11398 goto pool32f_invalid
;
11402 mips32_op
= OPC_ADDR_PS
;
11405 mips32_op
= OPC_MULR_PS
;
11407 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11410 goto pool32f_invalid
;
11414 /* MOV[FT].fmt and PREFX */
11415 cc
= (ctx
->opcode
>> 13) & 0x7;
11416 fmt
= (ctx
->opcode
>> 9) & 0x3;
11417 switch ((ctx
->opcode
>> 6) & 0x7) {
11421 gen_movcf_s(rs
, rt
, cc
, 0);
11424 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
11427 gen_movcf_ps(rs
, rt
, cc
, 0);
11430 goto pool32f_invalid
;
11436 gen_movcf_s(rs
, rt
, cc
, 1);
11439 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
11442 gen_movcf_ps(rs
, rt
, cc
, 1);
11445 goto pool32f_invalid
;
11451 goto pool32f_invalid
;
11454 #define FINSN_3ARG_SDPS(prfx) \
11455 switch ((ctx->opcode >> 8) & 0x3) { \
11457 mips32_op = OPC_##prfx##_S; \
11460 mips32_op = OPC_##prfx##_D; \
11462 case FMT_SDPS_PS: \
11463 mips32_op = OPC_##prfx##_PS; \
11466 goto pool32f_invalid; \
11469 /* regular FP ops */
11470 switch ((ctx
->opcode
>> 6) & 0x3) {
11472 FINSN_3ARG_SDPS(ADD
);
11475 FINSN_3ARG_SDPS(SUB
);
11478 FINSN_3ARG_SDPS(MUL
);
11481 fmt
= (ctx
->opcode
>> 8) & 0x3;
11483 mips32_op
= OPC_DIV_D
;
11484 } else if (fmt
== 0) {
11485 mips32_op
= OPC_DIV_S
;
11487 goto pool32f_invalid
;
11491 goto pool32f_invalid
;
11496 switch ((ctx
->opcode
>> 6) & 0x3) {
11498 FINSN_3ARG_SDPS(MOVN
);
11501 FINSN_3ARG_SDPS(MOVZ
);
11504 goto pool32f_invalid
;
11508 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
11512 MIPS_INVAL("pool32f");
11513 generate_exception(ctx
, EXCP_RI
);
11517 generate_exception_err(ctx
, EXCP_CpU
, 1);
11521 minor
= (ctx
->opcode
>> 21) & 0x1f;
11524 mips32_op
= OPC_BLTZ
;
11527 mips32_op
= OPC_BLTZAL
;
11530 mips32_op
= OPC_BLTZALS
;
11533 mips32_op
= OPC_BGEZ
;
11536 mips32_op
= OPC_BGEZAL
;
11539 mips32_op
= OPC_BGEZALS
;
11542 mips32_op
= OPC_BLEZ
;
11545 mips32_op
= OPC_BGTZ
;
11547 gen_compute_branch(ctx
, mips32_op
, 4, rs
, -1, imm
<< 1);
11553 mips32_op
= OPC_TLTI
;
11556 mips32_op
= OPC_TGEI
;
11559 mips32_op
= OPC_TLTIU
;
11562 mips32_op
= OPC_TGEIU
;
11565 mips32_op
= OPC_TNEI
;
11568 mips32_op
= OPC_TEQI
;
11570 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
11575 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
11576 4, rs
, 0, imm
<< 1);
11577 /* Compact branches don't have a delay slot, so just let
11578 the normal delay slot handling take us to the branch
11582 gen_logic_imm(env
, ctx
, OPC_LUI
, rs
, -1, imm
);
11588 /* COP2: Not implemented. */
11589 generate_exception_err(ctx
, EXCP_CpU
, 2);
11592 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
11595 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
11598 mips32_op
= OPC_BC1FANY4
;
11601 mips32_op
= OPC_BC1TANY4
;
11604 check_insn(env
, ctx
, ASE_MIPS3D
);
11607 gen_compute_branch1(env
, ctx
, mips32_op
,
11608 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
11613 /* MIPS DSP: not implemented */
11616 MIPS_INVAL("pool32i");
11617 generate_exception(ctx
, EXCP_RI
);
11622 minor
= (ctx
->opcode
>> 12) & 0xf;
11625 mips32_op
= OPC_LWL
;
11628 mips32_op
= OPC_SWL
;
11631 mips32_op
= OPC_LWR
;
11634 mips32_op
= OPC_SWR
;
11636 #if defined(TARGET_MIPS64)
11638 mips32_op
= OPC_LDL
;
11641 mips32_op
= OPC_SDL
;
11644 mips32_op
= OPC_LDR
;
11647 mips32_op
= OPC_SDR
;
11650 mips32_op
= OPC_LWU
;
11653 mips32_op
= OPC_LLD
;
11657 mips32_op
= OPC_LL
;
11660 gen_ld(env
, ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11663 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11666 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11668 #if defined(TARGET_MIPS64)
11670 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
11674 /* Treat as no-op */
11677 MIPS_INVAL("pool32c");
11678 generate_exception(ctx
, EXCP_RI
);
11683 mips32_op
= OPC_ADDI
;
11686 mips32_op
= OPC_ADDIU
;
11688 gen_arith_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11691 /* Logical operations */
11693 mips32_op
= OPC_ORI
;
11696 mips32_op
= OPC_XORI
;
11699 mips32_op
= OPC_ANDI
;
11701 gen_logic_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11704 /* Set less than immediate */
11706 mips32_op
= OPC_SLTI
;
11709 mips32_op
= OPC_SLTIU
;
11711 gen_slt_imm(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11714 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
11715 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
);
11719 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
11720 gen_compute_branch(ctx
, OPC_JALS
, 4, rt
, rs
, offset
);
11724 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1);
11728 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1);
11732 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
11733 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
11737 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
11738 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1);
11741 /* Floating point (COP1) */
11743 mips32_op
= OPC_LWC1
;
11746 mips32_op
= OPC_LDC1
;
11749 mips32_op
= OPC_SWC1
;
11752 mips32_op
= OPC_SDC1
;
11754 gen_cop1_ldst(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11758 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
11759 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
11761 gen_addiupc(ctx
, reg
, offset
, 0, 0);
11764 /* Loads and stores */
11766 mips32_op
= OPC_LB
;
11769 mips32_op
= OPC_LBU
;
11772 mips32_op
= OPC_LH
;
11775 mips32_op
= OPC_LHU
;
11778 mips32_op
= OPC_LW
;
11780 #ifdef TARGET_MIPS64
11782 mips32_op
= OPC_LD
;
11785 mips32_op
= OPC_SD
;
11789 mips32_op
= OPC_SB
;
11792 mips32_op
= OPC_SH
;
11795 mips32_op
= OPC_SW
;
11798 gen_ld(env
, ctx
, mips32_op
, rt
, rs
, imm
);
11801 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
11804 generate_exception(ctx
, EXCP_RI
);
11809 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
, int *is_branch
)
11813 /* make sure instructions are on a halfword boundary */
11814 if (ctx
->pc
& 0x1) {
11815 env
->CP0_BadVAddr
= ctx
->pc
;
11816 generate_exception(ctx
, EXCP_AdEL
);
11817 ctx
->bstate
= BS_STOP
;
11821 op
= (ctx
->opcode
>> 10) & 0x3f;
11822 /* Enforce properly-sized instructions in a delay slot */
11823 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11824 int bits
= ctx
->hflags
& MIPS_HFLAG_BMASK_EXT
;
11858 case POOL48A
: /* ??? */
11863 if (bits
& MIPS_HFLAG_BDS16
) {
11864 generate_exception(ctx
, EXCP_RI
);
11865 /* Just stop translation; the user is confused. */
11866 ctx
->bstate
= BS_STOP
;
11891 if (bits
& MIPS_HFLAG_BDS32
) {
11892 generate_exception(ctx
, EXCP_RI
);
11893 /* Just stop translation; the user is confused. */
11894 ctx
->bstate
= BS_STOP
;
11905 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11906 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
11907 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
11910 switch (ctx
->opcode
& 0x1) {
11919 gen_arith(env
, ctx
, opc
, rd
, rs1
, rs2
);
11924 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11925 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
11926 int amount
= (ctx
->opcode
>> 1) & 0x7;
11928 amount
= amount
== 0 ? 8 : amount
;
11930 switch (ctx
->opcode
& 0x1) {
11939 gen_shift_imm(env
, ctx
, opc
, rd
, rs
, amount
);
11943 gen_pool16c_insn(env
, ctx
, is_branch
);
11947 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11948 int rb
= 28; /* GP */
11949 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
11951 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
11955 if (ctx
->opcode
& 1) {
11956 generate_exception(ctx
, EXCP_RI
);
11959 int enc_dest
= uMIPS_RD(ctx
->opcode
);
11960 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
11961 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
11962 int rd
, rs
, re
, rt
;
11963 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
11964 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
11965 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
11967 rd
= rd_enc
[enc_dest
];
11968 re
= re_enc
[enc_dest
];
11969 rs
= rs_rt_enc
[enc_rs
];
11970 rt
= rs_rt_enc
[enc_rt
];
11972 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, 0);
11973 gen_arith_imm(env
, ctx
, OPC_ADDIU
, re
, rt
, 0);
11978 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11979 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11980 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
11981 offset
= (offset
== 0xf ? -1 : offset
);
11983 gen_ld(env
, ctx
, OPC_LBU
, rd
, rb
, offset
);
11988 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
11989 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
11990 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
11992 gen_ld(env
, ctx
, OPC_LHU
, rd
, rb
, offset
);
11997 int rd
= (ctx
->opcode
>> 5) & 0x1f;
11998 int rb
= 29; /* SP */
11999 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
12001 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
12006 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12007 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
12008 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
12010 gen_ld(env
, ctx
, OPC_LW
, rd
, rb
, offset
);
12015 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
12016 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
12017 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
12019 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
12024 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
12025 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
12026 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
12028 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
12033 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12034 int rb
= 29; /* SP */
12035 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
12037 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
12042 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
12043 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
12044 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
12046 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
12051 int rd
= uMIPS_RD5(ctx
->opcode
);
12052 int rs
= uMIPS_RS5(ctx
->opcode
);
12054 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rd
, rs
, 0);
12058 gen_andi16(env
, ctx
);
12061 switch (ctx
->opcode
& 0x1) {
12063 gen_addius5(env
, ctx
);
12066 gen_addiusp(env
, ctx
);
12071 switch (ctx
->opcode
& 0x1) {
12073 gen_addiur2(env
, ctx
);
12076 gen_addiur1sp(env
, ctx
);
12081 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
12082 SIMM(ctx
->opcode
, 0, 10) << 1);
12087 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
12088 mmreg(uMIPS_RD(ctx
->opcode
)),
12089 0, SIMM(ctx
->opcode
, 0, 7) << 1);
12094 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
12095 int imm
= ZIMM(ctx
->opcode
, 0, 7);
12097 imm
= (imm
== 0x7f ? -1 : imm
);
12098 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
12108 generate_exception(ctx
, EXCP_RI
);
12111 decode_micromips32_opc (env
, ctx
, op
, is_branch
);
12118 /* SmartMIPS extension to MIPS32 */
12120 #if defined(TARGET_MIPS64)
12122 /* MDMX extension to MIPS64 */
12126 static void decode_opc (CPUMIPSState
*env
, DisasContext
*ctx
, int *is_branch
)
12129 int rs
, rt
, rd
, sa
;
12130 uint32_t op
, op1
, op2
;
12133 /* make sure instructions are on a word boundary */
12134 if (ctx
->pc
& 0x3) {
12135 env
->CP0_BadVAddr
= ctx
->pc
;
12136 generate_exception(ctx
, EXCP_AdEL
);
12140 /* Handle blikely not taken case */
12141 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
12142 int l1
= gen_new_label();
12144 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx
")", ctx
->pc
+ 4);
12145 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12146 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
12147 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
12151 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
12152 tcg_gen_debug_insn_start(ctx
->pc
);
12155 op
= MASK_OP_MAJOR(ctx
->opcode
);
12156 rs
= (ctx
->opcode
>> 21) & 0x1f;
12157 rt
= (ctx
->opcode
>> 16) & 0x1f;
12158 rd
= (ctx
->opcode
>> 11) & 0x1f;
12159 sa
= (ctx
->opcode
>> 6) & 0x1f;
12160 imm
= (int16_t)ctx
->opcode
;
12163 op1
= MASK_SPECIAL(ctx
->opcode
);
12165 case OPC_SLL
: /* Shift with immediate */
12167 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12170 switch ((ctx
->opcode
>> 21) & 0x1f) {
12172 /* rotr is decoded as srl on non-R2 CPUs */
12173 if (env
->insn_flags
& ISA_MIPS32R2
) {
12178 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12181 generate_exception(ctx
, EXCP_RI
);
12185 case OPC_MOVN
: /* Conditional move */
12187 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
|
12188 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
12189 gen_cond_move(env
, ctx
, op1
, rd
, rs
, rt
);
12191 case OPC_ADD
... OPC_SUBU
:
12192 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12194 case OPC_SLLV
: /* Shifts */
12196 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12199 switch ((ctx
->opcode
>> 6) & 0x1f) {
12201 /* rotrv is decoded as srlv on non-R2 CPUs */
12202 if (env
->insn_flags
& ISA_MIPS32R2
) {
12207 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12210 generate_exception(ctx
, EXCP_RI
);
12214 case OPC_SLT
: /* Set on less than */
12216 gen_slt(env
, ctx
, op1
, rd
, rs
, rt
);
12218 case OPC_AND
: /* Logic*/
12222 gen_logic(env
, ctx
, op1
, rd
, rs
, rt
);
12224 case OPC_MULT
... OPC_DIVU
:
12226 check_insn(env
, ctx
, INSN_VR54XX
);
12227 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
12228 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
12230 gen_muldiv(ctx
, op1
, rs
, rt
);
12232 case OPC_JR
... OPC_JALR
:
12233 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
);
12236 case OPC_TGE
... OPC_TEQ
: /* Traps */
12238 gen_trap(ctx
, op1
, rs
, rt
, -1);
12240 case OPC_MFHI
: /* Move from HI/LO */
12242 gen_HILO(ctx
, op1
, rd
);
12245 case OPC_MTLO
: /* Move to HI/LO */
12246 gen_HILO(ctx
, op1
, rs
);
12248 case OPC_PMON
: /* Pmon entry point, also R4010 selsl */
12249 #ifdef MIPS_STRICT_STANDARD
12250 MIPS_INVAL("PMON / selsl");
12251 generate_exception(ctx
, EXCP_RI
);
12253 gen_helper_0e0i(pmon
, sa
);
12257 generate_exception(ctx
, EXCP_SYSCALL
);
12258 ctx
->bstate
= BS_STOP
;
12261 generate_exception(ctx
, EXCP_BREAK
);
12264 #ifdef MIPS_STRICT_STANDARD
12265 MIPS_INVAL("SPIM");
12266 generate_exception(ctx
, EXCP_RI
);
12268 /* Implemented as RI exception for now. */
12269 MIPS_INVAL("spim (unofficial)");
12270 generate_exception(ctx
, EXCP_RI
);
12274 /* Treat as NOP. */
12278 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
12279 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12280 check_cp1_enabled(ctx
);
12281 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
12282 (ctx
->opcode
>> 16) & 1);
12284 generate_exception_err(ctx
, EXCP_CpU
, 1);
12288 #if defined(TARGET_MIPS64)
12289 /* MIPS64 specific opcodes */
12294 check_insn(env
, ctx
, ISA_MIPS3
);
12295 check_mips_64(ctx
);
12296 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12299 switch ((ctx
->opcode
>> 21) & 0x1f) {
12301 /* drotr is decoded as dsrl on non-R2 CPUs */
12302 if (env
->insn_flags
& ISA_MIPS32R2
) {
12307 check_insn(env
, ctx
, ISA_MIPS3
);
12308 check_mips_64(ctx
);
12309 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12312 generate_exception(ctx
, EXCP_RI
);
12317 switch ((ctx
->opcode
>> 21) & 0x1f) {
12319 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
12320 if (env
->insn_flags
& ISA_MIPS32R2
) {
12325 check_insn(env
, ctx
, ISA_MIPS3
);
12326 check_mips_64(ctx
);
12327 gen_shift_imm(env
, ctx
, op1
, rd
, rt
, sa
);
12330 generate_exception(ctx
, EXCP_RI
);
12334 case OPC_DADD
... OPC_DSUBU
:
12335 check_insn(env
, ctx
, ISA_MIPS3
);
12336 check_mips_64(ctx
);
12337 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12341 check_insn(env
, ctx
, ISA_MIPS3
);
12342 check_mips_64(ctx
);
12343 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12346 switch ((ctx
->opcode
>> 6) & 0x1f) {
12348 /* drotrv is decoded as dsrlv on non-R2 CPUs */
12349 if (env
->insn_flags
& ISA_MIPS32R2
) {
12354 check_insn(env
, ctx
, ISA_MIPS3
);
12355 check_mips_64(ctx
);
12356 gen_shift(env
, ctx
, op1
, rd
, rs
, rt
);
12359 generate_exception(ctx
, EXCP_RI
);
12363 case OPC_DMULT
... OPC_DDIVU
:
12364 check_insn(env
, ctx
, ISA_MIPS3
);
12365 check_mips_64(ctx
);
12366 gen_muldiv(ctx
, op1
, rs
, rt
);
12369 default: /* Invalid */
12370 MIPS_INVAL("special");
12371 generate_exception(ctx
, EXCP_RI
);
12376 op1
= MASK_SPECIAL2(ctx
->opcode
);
12378 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
12379 case OPC_MSUB
... OPC_MSUBU
:
12380 check_insn(env
, ctx
, ISA_MIPS32
);
12381 gen_muldiv(ctx
, op1
, rs
, rt
);
12384 gen_arith(env
, ctx
, op1
, rd
, rs
, rt
);
12388 check_insn(env
, ctx
, ISA_MIPS32
);
12389 gen_cl(ctx
, op1
, rd
, rs
);
12392 /* XXX: not clear which exception should be raised
12393 * when in debug mode...
12395 check_insn(env
, ctx
, ISA_MIPS32
);
12396 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
12397 generate_exception(ctx
, EXCP_DBp
);
12399 generate_exception(ctx
, EXCP_DBp
);
12401 /* Treat as NOP. */
12404 case OPC_DIVU_G_2F
:
12405 case OPC_MULT_G_2F
:
12406 case OPC_MULTU_G_2F
:
12408 case OPC_MODU_G_2F
:
12409 check_insn(env
, ctx
, INSN_LOONGSON2F
);
12410 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12412 #if defined(TARGET_MIPS64)
12415 check_insn(env
, ctx
, ISA_MIPS64
);
12416 check_mips_64(ctx
);
12417 gen_cl(ctx
, op1
, rd
, rs
);
12419 case OPC_DMULT_G_2F
:
12420 case OPC_DMULTU_G_2F
:
12421 case OPC_DDIV_G_2F
:
12422 case OPC_DDIVU_G_2F
:
12423 case OPC_DMOD_G_2F
:
12424 case OPC_DMODU_G_2F
:
12425 check_insn(env
, ctx
, INSN_LOONGSON2F
);
12426 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12429 default: /* Invalid */
12430 MIPS_INVAL("special2");
12431 generate_exception(ctx
, EXCP_RI
);
12436 op1
= MASK_SPECIAL3(ctx
->opcode
);
12440 check_insn(env
, ctx
, ISA_MIPS32R2
);
12441 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
12444 check_insn(env
, ctx
, ISA_MIPS32R2
);
12445 op2
= MASK_BSHFL(ctx
->opcode
);
12446 gen_bshfl(ctx
, op2
, rt
, rd
);
12449 gen_rdhwr(env
, ctx
, rt
, rd
);
12452 check_insn(env
, ctx
, ASE_MT
);
12454 TCGv t0
= tcg_temp_new();
12455 TCGv t1
= tcg_temp_new();
12457 gen_load_gpr(t0
, rt
);
12458 gen_load_gpr(t1
, rs
);
12459 gen_helper_fork(t0
, t1
);
12465 check_insn(env
, ctx
, ASE_MT
);
12467 TCGv t0
= tcg_temp_new();
12469 save_cpu_state(ctx
, 1);
12470 gen_load_gpr(t0
, rs
);
12471 gen_helper_yield(t0
, cpu_env
, t0
);
12472 gen_store_gpr(t0
, rd
);
12476 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
12477 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
12478 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
12479 check_insn(env
, ctx
, INSN_LOONGSON2E
);
12480 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12482 #if defined(TARGET_MIPS64)
12483 case OPC_DEXTM
... OPC_DEXT
:
12484 case OPC_DINSM
... OPC_DINS
:
12485 check_insn(env
, ctx
, ISA_MIPS64R2
);
12486 check_mips_64(ctx
);
12487 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
12490 check_insn(env
, ctx
, ISA_MIPS64R2
);
12491 check_mips_64(ctx
);
12492 op2
= MASK_DBSHFL(ctx
->opcode
);
12493 gen_bshfl(ctx
, op2
, rt
, rd
);
12495 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
12496 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
12497 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
12498 check_insn(env
, ctx
, INSN_LOONGSON2E
);
12499 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
12502 default: /* Invalid */
12503 MIPS_INVAL("special3");
12504 generate_exception(ctx
, EXCP_RI
);
12509 op1
= MASK_REGIMM(ctx
->opcode
);
12511 case OPC_BLTZ
... OPC_BGEZL
: /* REGIMM branches */
12512 case OPC_BLTZAL
... OPC_BGEZALL
:
12513 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2);
12516 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
12518 gen_trap(ctx
, op1
, rs
, -1, imm
);
12521 check_insn(env
, ctx
, ISA_MIPS32R2
);
12522 /* Treat as NOP. */
12524 default: /* Invalid */
12525 MIPS_INVAL("regimm");
12526 generate_exception(ctx
, EXCP_RI
);
12531 check_cp0_enabled(ctx
);
12532 op1
= MASK_CP0(ctx
->opcode
);
12538 #if defined(TARGET_MIPS64)
12542 #ifndef CONFIG_USER_ONLY
12543 gen_cp0(env
, ctx
, op1
, rt
, rd
);
12544 #endif /* !CONFIG_USER_ONLY */
12546 case OPC_C0_FIRST
... OPC_C0_LAST
:
12547 #ifndef CONFIG_USER_ONLY
12548 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
12549 #endif /* !CONFIG_USER_ONLY */
12552 #ifndef CONFIG_USER_ONLY
12554 TCGv t0
= tcg_temp_new();
12556 op2
= MASK_MFMC0(ctx
->opcode
);
12559 check_insn(env
, ctx
, ASE_MT
);
12560 gen_helper_dmt(t0
);
12561 gen_store_gpr(t0
, rt
);
12564 check_insn(env
, ctx
, ASE_MT
);
12565 gen_helper_emt(t0
);
12566 gen_store_gpr(t0
, rt
);
12569 check_insn(env
, ctx
, ASE_MT
);
12570 gen_helper_dvpe(t0
, cpu_env
);
12571 gen_store_gpr(t0
, rt
);
12574 check_insn(env
, ctx
, ASE_MT
);
12575 gen_helper_evpe(t0
, cpu_env
);
12576 gen_store_gpr(t0
, rt
);
12579 check_insn(env
, ctx
, ISA_MIPS32R2
);
12580 save_cpu_state(ctx
, 1);
12581 gen_helper_di(t0
, cpu_env
);
12582 gen_store_gpr(t0
, rt
);
12583 /* Stop translation as we may have switched the execution mode */
12584 ctx
->bstate
= BS_STOP
;
12587 check_insn(env
, ctx
, ISA_MIPS32R2
);
12588 save_cpu_state(ctx
, 1);
12589 gen_helper_ei(t0
, cpu_env
);
12590 gen_store_gpr(t0
, rt
);
12591 /* Stop translation as we may have switched the execution mode */
12592 ctx
->bstate
= BS_STOP
;
12594 default: /* Invalid */
12595 MIPS_INVAL("mfmc0");
12596 generate_exception(ctx
, EXCP_RI
);
12601 #endif /* !CONFIG_USER_ONLY */
12604 check_insn(env
, ctx
, ISA_MIPS32R2
);
12605 gen_load_srsgpr(rt
, rd
);
12608 check_insn(env
, ctx
, ISA_MIPS32R2
);
12609 gen_store_srsgpr(rt
, rd
);
12613 generate_exception(ctx
, EXCP_RI
);
12617 case OPC_ADDI
: /* Arithmetic with immediate opcode */
12619 gen_arith_imm(env
, ctx
, op
, rt
, rs
, imm
);
12621 case OPC_SLTI
: /* Set on less than with immediate opcode */
12623 gen_slt_imm(env
, ctx
, op
, rt
, rs
, imm
);
12625 case OPC_ANDI
: /* Arithmetic with immediate opcode */
12629 gen_logic_imm(env
, ctx
, op
, rt
, rs
, imm
);
12631 case OPC_J
... OPC_JAL
: /* Jump */
12632 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
12633 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
);
12636 case OPC_BEQ
... OPC_BGTZ
: /* Branch */
12637 case OPC_BEQL
... OPC_BGTZL
:
12638 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2);
12641 case OPC_LB
... OPC_LWR
: /* Load and stores */
12643 gen_ld(env
, ctx
, op
, rt
, rs
, imm
);
12645 case OPC_SB
... OPC_SW
:
12647 gen_st(ctx
, op
, rt
, rs
, imm
);
12650 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
12653 check_cp0_enabled(ctx
);
12654 check_insn(env
, ctx
, ISA_MIPS3
| ISA_MIPS32
);
12655 /* Treat as NOP. */
12658 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
12659 /* Treat as NOP. */
12662 /* Floating point (COP1). */
12667 gen_cop1_ldst(env
, ctx
, op
, rt
, rs
, imm
);
12671 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12672 check_cp1_enabled(ctx
);
12673 op1
= MASK_CP1(ctx
->opcode
);
12677 check_insn(env
, ctx
, ISA_MIPS32R2
);
12682 gen_cp1(ctx
, op1
, rt
, rd
);
12684 #if defined(TARGET_MIPS64)
12687 check_insn(env
, ctx
, ISA_MIPS3
);
12688 gen_cp1(ctx
, op1
, rt
, rd
);
12694 check_insn(env
, ctx
, ASE_MIPS3D
);
12697 gen_compute_branch1(env
, ctx
, MASK_BC1(ctx
->opcode
),
12698 (rt
>> 2) & 0x7, imm
<< 2);
12706 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
12711 generate_exception (ctx
, EXCP_RI
);
12715 generate_exception_err(ctx
, EXCP_CpU
, 1);
12724 /* COP2: Not implemented. */
12725 generate_exception_err(ctx
, EXCP_CpU
, 2);
12728 check_insn(env
, ctx
, INSN_LOONGSON2F
);
12729 /* Note that these instructions use different fields. */
12730 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
12734 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
12735 check_cp1_enabled(ctx
);
12736 op1
= MASK_CP3(ctx
->opcode
);
12744 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
12747 /* Treat as NOP. */
12762 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
12766 generate_exception (ctx
, EXCP_RI
);
12770 generate_exception_err(ctx
, EXCP_CpU
, 1);
12774 #if defined(TARGET_MIPS64)
12775 /* MIPS64 opcodes */
12777 case OPC_LDL
... OPC_LDR
:
12780 check_insn(env
, ctx
, ISA_MIPS3
);
12781 check_mips_64(ctx
);
12782 gen_ld(env
, ctx
, op
, rt
, rs
, imm
);
12784 case OPC_SDL
... OPC_SDR
:
12786 check_insn(env
, ctx
, ISA_MIPS3
);
12787 check_mips_64(ctx
);
12788 gen_st(ctx
, op
, rt
, rs
, imm
);
12791 check_insn(env
, ctx
, ISA_MIPS3
);
12792 check_mips_64(ctx
);
12793 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
12797 check_insn(env
, ctx
, ISA_MIPS3
);
12798 check_mips_64(ctx
);
12799 gen_arith_imm(env
, ctx
, op
, rt
, rs
, imm
);
12803 check_insn(env
, ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
12804 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
12805 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
);
12809 check_insn(env
, ctx
, ASE_MDMX
);
12810 /* MDMX: Not implemented. */
12811 default: /* Invalid */
12812 MIPS_INVAL("major opcode");
12813 generate_exception(ctx
, EXCP_RI
);
12819 gen_intermediate_code_internal (CPUMIPSState
*env
, TranslationBlock
*tb
,
12823 target_ulong pc_start
;
12824 uint16_t *gen_opc_end
;
12833 qemu_log("search pc %d\n", search_pc
);
12836 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
12839 ctx
.singlestep_enabled
= env
->singlestep_enabled
;
12841 ctx
.bstate
= BS_NONE
;
12842 /* Restore delay slot state from the tb context. */
12843 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
12844 restore_cpu_state(env
, &ctx
);
12845 #ifdef CONFIG_USER_ONLY
12846 ctx
.mem_idx
= MIPS_HFLAG_UM
;
12848 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
12851 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
12852 if (max_insns
== 0)
12853 max_insns
= CF_COUNT_MASK
;
12854 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
12855 gen_icount_start();
12856 while (ctx
.bstate
== BS_NONE
) {
12857 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
12858 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
12859 if (bp
->pc
== ctx
.pc
) {
12860 save_cpu_state(&ctx
, 1);
12861 ctx
.bstate
= BS_BRANCH
;
12862 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
12863 /* Include the breakpoint location or the tb won't
12864 * be flushed when it must be. */
12866 goto done_generating
;
12872 j
= gen_opc_ptr
- gen_opc_buf
;
12876 gen_opc_instr_start
[lj
++] = 0;
12878 gen_opc_pc
[lj
] = ctx
.pc
;
12879 gen_opc_hflags
[lj
] = ctx
.hflags
& MIPS_HFLAG_BMASK
;
12880 gen_opc_instr_start
[lj
] = 1;
12881 gen_opc_icount
[lj
] = num_insns
;
12883 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
12887 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
12888 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
12890 decode_opc(env
, &ctx
, &is_branch
);
12891 } else if (env
->insn_flags
& ASE_MICROMIPS
) {
12892 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
12893 insn_bytes
= decode_micromips_opc(env
, &ctx
, &is_branch
);
12894 } else if (env
->insn_flags
& ASE_MIPS16
) {
12895 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
12896 insn_bytes
= decode_mips16_opc(env
, &ctx
, &is_branch
);
12898 generate_exception(&ctx
, EXCP_RI
);
12899 ctx
.bstate
= BS_STOP
;
12903 handle_delay_slot(env
, &ctx
, insn_bytes
);
12905 ctx
.pc
+= insn_bytes
;
12909 /* Execute a branch and its delay slot as a single instruction.
12910 This is what GDB expects and is consistent with what the
12911 hardware does (e.g. if a delay slot instruction faults, the
12912 reported PC is the PC of the branch). */
12913 if (env
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0)
12916 if ((ctx
.pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
12919 if (gen_opc_ptr
>= gen_opc_end
)
12922 if (num_insns
>= max_insns
)
12928 if (tb
->cflags
& CF_LAST_IO
)
12930 if (env
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
12931 save_cpu_state(&ctx
, ctx
.bstate
== BS_NONE
);
12932 gen_helper_0e0i(raise_exception
, EXCP_DEBUG
);
12934 switch (ctx
.bstate
) {
12936 gen_goto_tb(&ctx
, 0, ctx
.pc
);
12939 save_cpu_state(&ctx
, 0);
12940 gen_goto_tb(&ctx
, 0, ctx
.pc
);
12943 tcg_gen_exit_tb(0);
12951 gen_icount_end(tb
, num_insns
);
12952 *gen_opc_ptr
= INDEX_op_end
;
12954 j
= gen_opc_ptr
- gen_opc_buf
;
12957 gen_opc_instr_start
[lj
++] = 0;
12959 tb
->size
= ctx
.pc
- pc_start
;
12960 tb
->icount
= num_insns
;
12964 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
12965 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
12966 log_target_disas(pc_start
, ctx
.pc
- pc_start
, 0);
12972 void gen_intermediate_code (CPUMIPSState
*env
, struct TranslationBlock
*tb
)
12974 gen_intermediate_code_internal(env
, tb
, 0);
12977 void gen_intermediate_code_pc (CPUMIPSState
*env
, struct TranslationBlock
*tb
)
12979 gen_intermediate_code_internal(env
, tb
, 1);
12982 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
12986 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
12988 #define printfpr(fp) \
12991 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
12992 " fd:%13g fs:%13g psu: %13g\n", \
12993 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
12994 (double)(fp)->fd, \
12995 (double)(fp)->fs[FP_ENDIAN_IDX], \
12996 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
12999 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
13000 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
13001 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
13002 " fd:%13g fs:%13g psu:%13g\n", \
13003 tmp.w[FP_ENDIAN_IDX], tmp.d, \
13005 (double)tmp.fs[FP_ENDIAN_IDX], \
13006 (double)tmp.fs[!FP_ENDIAN_IDX]); \
13011 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
13012 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
13013 get_float_exception_flags(&env
->active_fpu
.fp_status
));
13014 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
13015 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
13016 printfpr(&env
->active_fpu
.fpr
[i
]);
13022 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
13023 /* Debug help: The architecture requires 32bit code to maintain proper
13024 sign-extended values on 64bit machines. */
13026 #define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
13029 cpu_mips_check_sign_extensions (CPUMIPSState
*env
, FILE *f
,
13030 fprintf_function cpu_fprintf
,
13035 if (!SIGN_EXT_P(env
->active_tc
.PC
))
13036 cpu_fprintf(f
, "BROKEN: pc=0x" TARGET_FMT_lx
"\n", env
->active_tc
.PC
);
13037 if (!SIGN_EXT_P(env
->active_tc
.HI
[0]))
13038 cpu_fprintf(f
, "BROKEN: HI=0x" TARGET_FMT_lx
"\n", env
->active_tc
.HI
[0]);
13039 if (!SIGN_EXT_P(env
->active_tc
.LO
[0]))
13040 cpu_fprintf(f
, "BROKEN: LO=0x" TARGET_FMT_lx
"\n", env
->active_tc
.LO
[0]);
13041 if (!SIGN_EXT_P(env
->btarget
))
13042 cpu_fprintf(f
, "BROKEN: btarget=0x" TARGET_FMT_lx
"\n", env
->btarget
);
13044 for (i
= 0; i
< 32; i
++) {
13045 if (!SIGN_EXT_P(env
->active_tc
.gpr
[i
]))
13046 cpu_fprintf(f
, "BROKEN: %s=0x" TARGET_FMT_lx
"\n", regnames
[i
], env
->active_tc
.gpr
[i
]);
13049 if (!SIGN_EXT_P(env
->CP0_EPC
))
13050 cpu_fprintf(f
, "BROKEN: EPC=0x" TARGET_FMT_lx
"\n", env
->CP0_EPC
);
13051 if (!SIGN_EXT_P(env
->lladdr
))
13052 cpu_fprintf(f
, "BROKEN: LLAddr=0x" TARGET_FMT_lx
"\n", env
->lladdr
);
13056 void cpu_dump_state (CPUMIPSState
*env
, FILE *f
, fprintf_function cpu_fprintf
,
13061 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
13062 " LO=0x" TARGET_FMT_lx
" ds %04x "
13063 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
13064 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
13065 env
->hflags
, env
->btarget
, env
->bcond
);
13066 for (i
= 0; i
< 32; i
++) {
13068 cpu_fprintf(f
, "GPR%02d:", i
);
13069 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
13071 cpu_fprintf(f
, "\n");
13074 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
13075 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
13076 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx
"\n",
13077 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
13078 if (env
->hflags
& MIPS_HFLAG_FPU
)
13079 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
13080 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
13081 cpu_mips_check_sign_extensions(env
, f
, cpu_fprintf
, flags
);
13085 static void mips_tcg_init(void)
13090 /* Initialize various static tables. */
13094 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
13095 TCGV_UNUSED(cpu_gpr
[0]);
13096 for (i
= 1; i
< 32; i
++)
13097 cpu_gpr
[i
] = tcg_global_mem_new(TCG_AREG0
,
13098 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
13101 for (i
= 0; i
< 32; i
++) {
13102 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
]);
13103 fpu_f64
[i
] = tcg_global_mem_new_i64(TCG_AREG0
, off
, fregnames
[i
]);
13106 cpu_PC
= tcg_global_mem_new(TCG_AREG0
,
13107 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
13108 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
13109 cpu_HI
[i
] = tcg_global_mem_new(TCG_AREG0
,
13110 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
13112 cpu_LO
[i
] = tcg_global_mem_new(TCG_AREG0
,
13113 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
13115 cpu_ACX
[i
] = tcg_global_mem_new(TCG_AREG0
,
13116 offsetof(CPUMIPSState
, active_tc
.ACX
[i
]),
13119 cpu_dspctrl
= tcg_global_mem_new(TCG_AREG0
,
13120 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
13122 bcond
= tcg_global_mem_new(TCG_AREG0
,
13123 offsetof(CPUMIPSState
, bcond
), "bcond");
13124 btarget
= tcg_global_mem_new(TCG_AREG0
,
13125 offsetof(CPUMIPSState
, btarget
), "btarget");
13126 hflags
= tcg_global_mem_new_i32(TCG_AREG0
,
13127 offsetof(CPUMIPSState
, hflags
), "hflags");
13129 fpu_fcr0
= tcg_global_mem_new_i32(TCG_AREG0
,
13130 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
13132 fpu_fcr31
= tcg_global_mem_new_i32(TCG_AREG0
,
13133 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
13136 /* register helpers */
13137 #define GEN_HELPER 2
13138 #include "helper.h"
13143 #include "translate_init.c"
13145 MIPSCPU
*cpu_mips_init(const char *cpu_model
)
13149 const mips_def_t
*def
;
13151 def
= cpu_mips_find_by_name(cpu_model
);
13154 cpu
= MIPS_CPU(object_new(TYPE_MIPS_CPU
));
13156 env
->cpu_model
= def
;
13157 env
->cpu_model_str
= cpu_model
;
13159 #ifndef CONFIG_USER_ONLY
13160 mmu_init(env
, def
);
13162 fpu_init(env
, def
);
13163 mvp_init(env
, def
);
13165 cpu_reset(CPU(cpu
));
13166 qemu_init_vcpu(env
);
13170 void cpu_state_reset(CPUMIPSState
*env
)
13172 if (qemu_loglevel_mask(CPU_LOG_RESET
)) {
13173 qemu_log("CPU Reset (CPU %d)\n", env
->cpu_index
);
13174 log_cpu_state(env
, 0);
13177 memset(env
, 0, offsetof(CPUMIPSState
, breakpoints
));
13180 /* Reset registers to their default values */
13181 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
13182 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
13183 #ifdef TARGET_WORDS_BIGENDIAN
13184 env
->CP0_Config0
|= (1 << CP0C0_BE
);
13186 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
13187 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
13188 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
13189 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
13190 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
13191 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
13192 << env
->cpu_model
->CP0_LLAddr_shift
;
13193 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
13194 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
13195 env
->CCRes
= env
->cpu_model
->CCRes
;
13196 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
13197 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
13198 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
13199 env
->current_tc
= 0;
13200 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
13201 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
13202 #if defined(TARGET_MIPS64)
13203 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
13204 env
->SEGMask
|= 3ULL << 62;
13207 env
->PABITS
= env
->cpu_model
->PABITS
;
13208 env
->PAMask
= (target_ulong
)((1ULL << env
->cpu_model
->PABITS
) - 1);
13209 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
13210 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
13211 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
13212 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
13213 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
13214 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
13215 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
13216 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
13217 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
13218 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
13219 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
13220 env
->insn_flags
= env
->cpu_model
->insn_flags
;
13222 #if defined(CONFIG_USER_ONLY)
13223 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
13224 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
13225 hardware registers. */
13226 env
->CP0_HWREna
|= 0x0000000F;
13227 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
13228 env
->CP0_Status
|= (1 << CP0St_CU1
);
13230 if (env
->cpu_model
->insn_flags
& ASE_DSPR2
) {
13231 env
->hflags
|= MIPS_HFLAG_DSP
| MIPS_HFLAG_DSPR2
;
13232 } else if (env
->cpu_model
->insn_flags
& ASE_DSP
) {
13233 env
->hflags
|= MIPS_HFLAG_DSP
;
13236 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
13237 /* If the exception was raised from a delay slot,
13238 come back to the jump. */
13239 env
->CP0_ErrorEPC
= env
->active_tc
.PC
- 4;
13241 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
13243 env
->active_tc
.PC
= (int32_t)0xBFC00000;
13244 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
13245 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
13246 env
->CP0_Wired
= 0;
13247 env
->CP0_EBase
= 0x80000000 | (env
->cpu_index
& 0x3FF);
13248 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
13249 /* vectored interrupts not implemented, timer on int 7,
13250 no performance counters. */
13251 env
->CP0_IntCtl
= 0xe0000000;
13255 for (i
= 0; i
< 7; i
++) {
13256 env
->CP0_WatchLo
[i
] = 0;
13257 env
->CP0_WatchHi
[i
] = 0x80000000;
13259 env
->CP0_WatchLo
[7] = 0;
13260 env
->CP0_WatchHi
[7] = 0;
13262 /* Count register increments in debug mode, EJTAG version 1 */
13263 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
13265 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
13268 /* Only TC0 on VPE 0 starts as active. */
13269 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
13270 env
->tcs
[i
].CP0_TCBind
= env
->cpu_index
<< CP0TCBd_CurVPE
;
13271 env
->tcs
[i
].CP0_TCHalt
= 1;
13273 env
->active_tc
.CP0_TCHalt
= 1;
13276 if (!env
->cpu_index
) {
13277 /* VPE0 starts up enabled. */
13278 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
13279 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
13281 /* TC0 starts up unhalted. */
13283 env
->active_tc
.CP0_TCHalt
= 0;
13284 env
->tcs
[0].CP0_TCHalt
= 0;
13285 /* With thread 0 active. */
13286 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
13287 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
13291 compute_hflags(env
);
13292 env
->exception_index
= EXCP_NONE
;
13295 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
, int pc_pos
)
13297 env
->active_tc
.PC
= gen_opc_pc
[pc_pos
];
13298 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
13299 env
->hflags
|= gen_opc_hflags
[pc_pos
];