2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
26 #include "disas/disas.h"
27 #include "exec/exec-all.h"
29 #include "exec/cpu_ldst.h"
30 #include "hw/mips/cpudevs.h"
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 #include "exec/semihost.h"
36 #include "target/mips/trace.h"
37 #include "trace-tcg.h"
/* Translation-time debug switch; 0 disables it in normal builds.
   NOTE(review): all uses are outside this chunk -- confirm semantics at call sites. */
#define MIPS_DEBUG_DISAS 0
/* MIPS major opcodes */
/* Select the major-opcode field, instruction bits 31..26. */
/* NOTE(review): 0x3F << 26 overflows a signed 32-bit int (UB in ISO C);
   presumably relies on two's-complement wrap -- confirm the build's flags. */
#define MASK_OP_MAJOR(op) (op & (0x3F << 26))
46 /* indirect opcode tables */
47 OPC_SPECIAL
= (0x00 << 26),
48 OPC_REGIMM
= (0x01 << 26),
49 OPC_CP0
= (0x10 << 26),
50 OPC_CP1
= (0x11 << 26),
51 OPC_CP2
= (0x12 << 26),
52 OPC_CP3
= (0x13 << 26),
53 OPC_SPECIAL2
= (0x1C << 26),
54 OPC_SPECIAL3
= (0x1F << 26),
55 /* arithmetic with immediate */
56 OPC_ADDI
= (0x08 << 26),
57 OPC_ADDIU
= (0x09 << 26),
58 OPC_SLTI
= (0x0A << 26),
59 OPC_SLTIU
= (0x0B << 26),
60 /* logic with immediate */
61 OPC_ANDI
= (0x0C << 26),
62 OPC_ORI
= (0x0D << 26),
63 OPC_XORI
= (0x0E << 26),
64 OPC_LUI
= (0x0F << 26),
65 /* arithmetic with immediate */
66 OPC_DADDI
= (0x18 << 26),
67 OPC_DADDIU
= (0x19 << 26),
68 /* Jump and branches */
70 OPC_JAL
= (0x03 << 26),
71 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
72 OPC_BEQL
= (0x14 << 26),
73 OPC_BNE
= (0x05 << 26),
74 OPC_BNEL
= (0x15 << 26),
75 OPC_BLEZ
= (0x06 << 26),
76 OPC_BLEZL
= (0x16 << 26),
77 OPC_BGTZ
= (0x07 << 26),
78 OPC_BGTZL
= (0x17 << 26),
79 OPC_JALX
= (0x1D << 26),
80 OPC_DAUI
= (0x1D << 26),
82 OPC_LDL
= (0x1A << 26),
83 OPC_LDR
= (0x1B << 26),
84 OPC_LB
= (0x20 << 26),
85 OPC_LH
= (0x21 << 26),
86 OPC_LWL
= (0x22 << 26),
87 OPC_LW
= (0x23 << 26),
88 OPC_LWPC
= OPC_LW
| 0x5,
89 OPC_LBU
= (0x24 << 26),
90 OPC_LHU
= (0x25 << 26),
91 OPC_LWR
= (0x26 << 26),
92 OPC_LWU
= (0x27 << 26),
93 OPC_SB
= (0x28 << 26),
94 OPC_SH
= (0x29 << 26),
95 OPC_SWL
= (0x2A << 26),
96 OPC_SW
= (0x2B << 26),
97 OPC_SDL
= (0x2C << 26),
98 OPC_SDR
= (0x2D << 26),
99 OPC_SWR
= (0x2E << 26),
100 OPC_LL
= (0x30 << 26),
101 OPC_LLD
= (0x34 << 26),
102 OPC_LD
= (0x37 << 26),
103 OPC_LDPC
= OPC_LD
| 0x5,
104 OPC_SC
= (0x38 << 26),
105 OPC_SCD
= (0x3C << 26),
106 OPC_SD
= (0x3F << 26),
107 /* Floating point load/store */
108 OPC_LWC1
= (0x31 << 26),
109 OPC_LWC2
= (0x32 << 26),
110 OPC_LDC1
= (0x35 << 26),
111 OPC_LDC2
= (0x36 << 26),
112 OPC_SWC1
= (0x39 << 26),
113 OPC_SWC2
= (0x3A << 26),
114 OPC_SDC1
= (0x3D << 26),
115 OPC_SDC2
= (0x3E << 26),
116 /* Compact Branches */
117 OPC_BLEZALC
= (0x06 << 26),
118 OPC_BGEZALC
= (0x06 << 26),
119 OPC_BGEUC
= (0x06 << 26),
120 OPC_BGTZALC
= (0x07 << 26),
121 OPC_BLTZALC
= (0x07 << 26),
122 OPC_BLTUC
= (0x07 << 26),
123 OPC_BOVC
= (0x08 << 26),
124 OPC_BEQZALC
= (0x08 << 26),
125 OPC_BEQC
= (0x08 << 26),
126 OPC_BLEZC
= (0x16 << 26),
127 OPC_BGEZC
= (0x16 << 26),
128 OPC_BGEC
= (0x16 << 26),
129 OPC_BGTZC
= (0x17 << 26),
130 OPC_BLTZC
= (0x17 << 26),
131 OPC_BLTC
= (0x17 << 26),
132 OPC_BNVC
= (0x18 << 26),
133 OPC_BNEZALC
= (0x18 << 26),
134 OPC_BNEC
= (0x18 << 26),
135 OPC_BC
= (0x32 << 26),
136 OPC_BEQZC
= (0x36 << 26),
137 OPC_JIC
= (0x36 << 26),
138 OPC_BALC
= (0x3A << 26),
139 OPC_BNEZC
= (0x3E << 26),
140 OPC_JIALC
= (0x3E << 26),
141 /* MDMX ASE specific */
142 OPC_MDMX
= (0x1E << 26),
143 /* MSA ASE, same as MDMX */
145 /* Cache and prefetch */
146 OPC_CACHE
= (0x2F << 26),
147 OPC_PREF
= (0x33 << 26),
148 /* PC-relative address computation / loads */
149 OPC_PCREL
= (0x3B << 26),
/* PC-relative address computation / loads */
/* Major opcode plus bits 20..19 of the PCREL minor-opcode selector. */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
/* Major opcode plus bits 20..16 (full 5-bit PCREL minor-opcode selector). */
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
156 /* Instructions determined by bits 19 and 20 */
157 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
158 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
159 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
161 /* Instructions determined by bits 16 ... 20 */
162 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
163 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
166 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
169 /* MIPS special opcodes */
/*
 * Major opcode plus the SPECIAL function field (bits 5..0).
 * Fully parenthesized: '|' binds more loosely than comparison
 * operators, so the unparenthesized expansion would mis-expand in
 * expressions such as 'MASK_SPECIAL(op) == OPC_ADD'.
 */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
174 OPC_SLL
= 0x00 | OPC_SPECIAL
,
175 /* NOP is SLL r0, r0, 0 */
176 /* SSNOP is SLL r0, r0, 1 */
177 /* EHB is SLL r0, r0, 3 */
178 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
179 OPC_ROTR
= OPC_SRL
| (1 << 21),
180 OPC_SRA
= 0x03 | OPC_SPECIAL
,
181 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
182 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
183 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
184 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
185 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
186 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
187 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
188 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
189 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
190 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
191 OPC_DROTR
= OPC_DSRL
| (1 << 21),
192 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
193 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
194 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
195 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
196 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
197 /* Multiplication / division */
198 OPC_MULT
= 0x18 | OPC_SPECIAL
,
199 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
200 OPC_DIV
= 0x1A | OPC_SPECIAL
,
201 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
202 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
203 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
204 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
205 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
207 /* 2 registers arithmetic / logic */
208 OPC_ADD
= 0x20 | OPC_SPECIAL
,
209 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
210 OPC_SUB
= 0x22 | OPC_SPECIAL
,
211 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
212 OPC_AND
= 0x24 | OPC_SPECIAL
,
213 OPC_OR
= 0x25 | OPC_SPECIAL
,
214 OPC_XOR
= 0x26 | OPC_SPECIAL
,
215 OPC_NOR
= 0x27 | OPC_SPECIAL
,
216 OPC_SLT
= 0x2A | OPC_SPECIAL
,
217 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
218 OPC_DADD
= 0x2C | OPC_SPECIAL
,
219 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
220 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
221 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
223 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
224 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
226 OPC_TGE
= 0x30 | OPC_SPECIAL
,
227 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
228 OPC_TLT
= 0x32 | OPC_SPECIAL
,
229 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
230 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
231 OPC_TNE
= 0x36 | OPC_SPECIAL
,
232 /* HI / LO registers load & stores */
233 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
234 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
235 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
236 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
237 /* Conditional moves */
238 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
239 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
241 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
242 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
244 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
247 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
248 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
249 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
250 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
251 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
253 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
254 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
255 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
256 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
/* R6 Multiply and Divide instructions have the same Opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
/* Adds bits 10..0 (sa + function) to disambiguate the R6 variants. */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
264 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
265 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
266 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
267 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
268 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
269 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
270 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
271 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
273 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
274 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
275 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
276 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
277 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
278 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
279 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
280 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
282 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
283 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
284 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
285 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
286 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
288 OPC_LSA
= 0x05 | OPC_SPECIAL
,
289 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
292 /* Multiplication variants of the vr54xx. */
/*
 * SPECIAL-class mask plus the 5-bit sa field (bits 10..6), used to
 * distinguish the VR54xx multiply variants.  Parenthesized so the
 * '|' expansion stays atomic inside comparisons at the use site.
 */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
296 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
297 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
298 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
299 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
301 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
303 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
305 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
306 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
307 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
308 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
309 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
312 /* REGIMM (rt field) opcodes */
/*
 * Major opcode plus the REGIMM rt field (bits 20..16).
 * Parenthesized so a higher-precedence operator at the use site
 * (e.g. '==') cannot split the '|' expansion.
 */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
316 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
317 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
318 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
319 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
320 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
321 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
322 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
323 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
324 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
325 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
326 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
327 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
328 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
329 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
330 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
331 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
333 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
334 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
337 /* Special2 opcodes */
/*
 * Major opcode plus the SPECIAL2 function field (bits 5..0).
 * Parenthesized so a higher-precedence operator at the use site
 * (e.g. '==') cannot split the '|' expansion.
 */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
341 /* Multiply & xxx operations */
342 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
343 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
344 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
345 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
346 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
348 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
349 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
350 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
351 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
352 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
353 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
354 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
355 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
356 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
357 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
358 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
359 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
361 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
362 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
363 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
364 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
366 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
369 /* Special3 opcodes */
/*
 * Major opcode plus the SPECIAL3 function field (bits 5..0).
 * Parenthesized so a higher-precedence operator at the use site
 * (e.g. '==') cannot split the '|' expansion.
 */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
373 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
374 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
375 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
376 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
377 OPC_INS
= 0x04 | OPC_SPECIAL3
,
378 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
379 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
380 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
381 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
382 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
383 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
384 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
385 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
388 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
389 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
390 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
391 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
392 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
393 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
394 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
395 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
396 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
397 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
398 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
399 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
402 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
403 /* MIPS DSP Arithmetic */
404 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
405 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
406 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
407 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
408 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
409 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
410 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
411 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
412 /* MIPS DSP GPR-Based Shift Sub-class */
413 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
414 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
415 /* MIPS DSP Multiply Sub-class insns */
416 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
417 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
418 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
419 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
420 /* DSP Bit/Manipulation Sub-class */
421 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
422 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
423 /* MIPS DSP Append Sub-class */
424 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
425 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
426 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
427 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
428 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
431 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
432 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
433 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
434 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
435 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
436 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
437 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
438 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
439 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
440 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
441 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
442 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
443 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
444 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
445 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
446 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
449 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
450 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
451 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
452 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
453 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
454 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/*
 * SPECIAL3 mask plus the 5-bit sa field (bits 10..6) selecting the
 * BSHFL sub-opcode.  Parenthesized so the '|' expansion stays atomic,
 * matching the already-parenthesized DSP MASK_* macros below.
 */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
461 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
462 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
463 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
464 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
465 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
466 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/*
 * SPECIAL3 mask plus the 5-bit sa field (bits 10..6) selecting the
 * DBSHFL sub-opcode.  Parenthesized so the '|' expansion stays atomic,
 * matching the already-parenthesized DSP MASK_* macros below.
 */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
473 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
474 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
475 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
476 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
477 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
480 /* MIPS DSP REGIMM opcodes */
482 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
483 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
/* SPECIAL3 mask plus bits 10..6, selecting the LX (indexed load) sub-opcode. */
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
489 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
490 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
491 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
492 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the ADDU.QB sub-opcode. */
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
497 /* MIPS DSP Arithmetic Sub-class */
498 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
499 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
500 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
501 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
502 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
503 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
504 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
505 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
506 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
507 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
508 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
509 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
510 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
516 /* MIPS DSP Multiply Sub-class insns */
517 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
518 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
520 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
521 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
522 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
/* OPC_ADDUH_QB_DSP shares its encoding with OPC_MULT_G_2E (0x18 | OPC_SPECIAL3). */
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
/* SPECIAL3 mask plus bits 10..6, selecting the ADDUH.QB sub-opcode. */
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
528 /* MIPS DSP Arithmetic Sub-class */
529 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
530 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
531 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
532 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
533 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
534 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
535 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
536 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
537 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
538 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
539 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
541 /* MIPS DSP Multiply Sub-class insns */
542 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the ABSQ_S.PH sub-opcode. */
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
550 /* MIPS DSP Arithmetic Sub-class */
551 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
552 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
553 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
554 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
555 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
556 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
557 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
558 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
559 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
560 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
561 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
564 /* DSP Bit/Manipulation Sub-class */
565 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the CMPU.EQ.QB sub-opcode. */
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
574 /* MIPS DSP Arithmetic Sub-class */
575 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
576 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
577 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
578 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
579 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
580 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
581 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
582 /* DSP Compare-Pick Sub-class */
583 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
584 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
585 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the SHLL.QB sub-opcode. */
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
602 /* MIPS DSP GPR-Based Shift Sub-class */
603 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
604 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
605 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
606 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
607 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
608 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
609 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
610 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
611 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
612 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
613 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the DPA.W.PH sub-opcode. */
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
629 /* MIPS DSP Multiply Sub-class insns */
630 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
631 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
632 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
633 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
634 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
635 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
636 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
637 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
638 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
639 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
640 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the INSV sub-opcode. */
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
656 /* DSP Bit/Manipulation Sub-class */
657 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the APPEND sub-opcode. */
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
662 /* MIPS DSP Append Sub-class */
663 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
664 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
665 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the EXTR.W sub-opcode. */
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
670 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
671 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
672 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
673 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
674 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
675 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
676 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
677 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
678 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
679 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
680 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
681 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
683 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
684 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
685 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
686 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
687 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the ABSQ_S.QH sub-opcode. */
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
692 /* MIPS DSP Arithmetic Sub-class */
693 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
694 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
695 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
697 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
698 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
699 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
700 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
701 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
702 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
703 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
710 /* DSP Bit/Manipulation Sub-class */
711 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the ADDU.OB sub-opcode. */
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
721 /* MIPS DSP Multiply Sub-class insns */
722 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
723 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
724 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
725 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
726 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
727 /* MIPS DSP Arithmetic Sub-class */
728 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
729 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
730 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
731 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
732 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
733 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
734 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
735 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
737 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
739 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
742 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
743 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
747 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the CMPU.EQ.OB sub-opcode. */
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
753 /* DSP Compare-Pick Sub-class */
754 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
755 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
756 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
762 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
763 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
764 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
773 /* MIPS DSP Arithmetic Sub-class */
774 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the DAPPEND sub-opcode. */
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
786 /* DSP Append Sub-class */
787 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
788 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
789 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
790 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the DEXTR.W sub-opcode. */
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
795 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
796 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
797 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
798 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
799 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
800 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
801 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
802 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
803 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
804 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
805 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
806 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the DINSV sub-opcode. */
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
821 /* DSP Bit/Manipulation Sub-class */
822 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
/* SPECIAL3 mask plus bits 10..6, selecting the DPAQ.W.QH sub-opcode. */
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
827 /* MIPS DSP Multiply Sub-class insns */
828 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
834 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
835 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
836 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
837 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
838 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
856 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
858 /* MIPS DSP GPR-Based Shift Sub-class */
859 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
865 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
866 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
867 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
868 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
869 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
887 /* Coprocessor 0 (rs field) */
888 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
891 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
892 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
893 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
894 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
895 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
896 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
897 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
898 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
899 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
900 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
901 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
902 OPC_C0
= (0x10 << 21) | OPC_CP0
,
903 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
904 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
908 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
911 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
912 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
913 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
914 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
915 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
916 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
917 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
918 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
921 /* Coprocessor 0 (with rs == C0) */
922 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
925 OPC_TLBR
= 0x01 | OPC_C0
,
926 OPC_TLBWI
= 0x02 | OPC_C0
,
927 OPC_TLBINV
= 0x03 | OPC_C0
,
928 OPC_TLBINVF
= 0x04 | OPC_C0
,
929 OPC_TLBWR
= 0x06 | OPC_C0
,
930 OPC_TLBP
= 0x08 | OPC_C0
,
931 OPC_RFE
= 0x10 | OPC_C0
,
932 OPC_ERET
= 0x18 | OPC_C0
,
933 OPC_DERET
= 0x1F | OPC_C0
,
934 OPC_WAIT
= 0x20 | OPC_C0
,
937 /* Coprocessor 1 (rs field) */
938 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
940 /* Values for the fmt field in FP instructions */
942 /* 0 - 15 are reserved */
943 FMT_S
= 16, /* single fp */
944 FMT_D
= 17, /* double fp */
945 FMT_E
= 18, /* extended fp */
946 FMT_Q
= 19, /* quad fp */
947 FMT_W
= 20, /* 32-bit fixed */
948 FMT_L
= 21, /* 64-bit fixed */
949 FMT_PS
= 22, /* paired single fp */
950 /* 23 - 31 are reserved */
954 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
955 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
956 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
957 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
958 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
959 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
960 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
961 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
962 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
963 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
964 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
965 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
966 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
967 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
968 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
969 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
970 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
971 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
972 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
973 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
974 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
975 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
976 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
977 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
978 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
979 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
980 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
981 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
982 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
983 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
986 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
987 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
990 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
991 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
992 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
993 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
997 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
998 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1002 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1003 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1006 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1009 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1010 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1011 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1012 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1013 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1014 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1015 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1016 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1017 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1018 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1019 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1022 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1025 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1026 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1027 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1028 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1029 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1030 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1031 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1032 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1034 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1035 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1036 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1037 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1038 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1039 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1040 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1041 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1043 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1044 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1045 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1046 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1047 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1048 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1049 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1050 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1052 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1053 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1054 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1055 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1056 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1057 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1058 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1059 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1061 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1062 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1063 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1064 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1065 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1066 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1068 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1069 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1070 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1071 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1072 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1073 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1075 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1076 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1077 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1078 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1079 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1080 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1082 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1083 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1084 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1085 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1086 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1087 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1089 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1090 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1091 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1092 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1093 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1094 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1096 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1097 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1098 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1099 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1100 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1101 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1103 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1104 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1105 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1106 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1107 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1108 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1110 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1111 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1112 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1113 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1114 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1115 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1119 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1122 OPC_LWXC1
= 0x00 | OPC_CP3
,
1123 OPC_LDXC1
= 0x01 | OPC_CP3
,
1124 OPC_LUXC1
= 0x05 | OPC_CP3
,
1125 OPC_SWXC1
= 0x08 | OPC_CP3
,
1126 OPC_SDXC1
= 0x09 | OPC_CP3
,
1127 OPC_SUXC1
= 0x0D | OPC_CP3
,
1128 OPC_PREFX
= 0x0F | OPC_CP3
,
1129 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1130 OPC_MADD_S
= 0x20 | OPC_CP3
,
1131 OPC_MADD_D
= 0x21 | OPC_CP3
,
1132 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1133 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1134 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1135 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1136 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1137 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1138 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1139 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1140 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1141 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1145 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1147 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1148 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1149 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1150 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1151 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1152 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1153 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1154 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1155 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1156 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1157 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1158 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1159 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1160 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1161 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1162 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1163 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1164 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1165 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1166 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1167 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1169 /* MI10 instruction */
1170 OPC_LD_B
= (0x20) | OPC_MSA
,
1171 OPC_LD_H
= (0x21) | OPC_MSA
,
1172 OPC_LD_W
= (0x22) | OPC_MSA
,
1173 OPC_LD_D
= (0x23) | OPC_MSA
,
1174 OPC_ST_B
= (0x24) | OPC_MSA
,
1175 OPC_ST_H
= (0x25) | OPC_MSA
,
1176 OPC_ST_W
= (0x26) | OPC_MSA
,
1177 OPC_ST_D
= (0x27) | OPC_MSA
,
1181 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1182 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1183 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1184 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1185 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1186 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1187 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1188 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1189 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1190 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1191 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1192 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1193 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1195 /* I8 instruction */
1196 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1197 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1198 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1199 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1200 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1201 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1202 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1203 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1204 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1205 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1207 /* VEC/2R/2RF instruction */
1208 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1209 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1210 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1211 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1212 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1213 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1214 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1216 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1217 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1219 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1220 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1221 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1222 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1223 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1225 /* 2RF instruction df(bit 16) = _w, _d */
1226 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1227 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1228 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1229 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1230 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1231 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1232 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1233 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1234 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1235 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1236 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1237 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1238 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1239 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1240 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1241 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1243 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1244 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1245 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1246 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1247 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1248 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1249 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1250 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1251 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1252 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1253 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1254 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1255 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1256 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1257 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1258 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1259 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1260 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1261 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1262 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1263 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1264 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1265 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1266 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1267 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1268 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1269 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1270 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1271 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1272 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1273 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1274 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1275 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1276 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1277 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1278 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1279 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1280 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1281 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1282 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1283 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1284 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1285 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1286 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1287 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1288 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1289 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1290 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1291 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1292 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1293 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1294 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1295 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1296 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1297 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1298 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1299 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1300 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1301 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1302 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1303 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1304 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1305 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1306 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1308 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1309 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1310 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1311 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1312 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1313 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1314 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1315 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1316 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1317 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1319 /* 3RF instruction _df(bit 21) = _w, _d */
1320 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1321 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1322 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1323 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1324 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1325 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1326 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1327 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1328 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1329 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1330 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1331 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1332 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1333 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1334 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1335 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1336 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1337 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1338 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1339 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1340 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1341 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1342 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1343 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1345 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1347 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1348 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1350 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1351 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1352 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1353 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1354 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1355 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1356 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1357 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1358 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1359 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1360 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1362 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1363 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1364 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1365 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1366 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1367 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1368 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1369 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1370 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1371 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1372 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1373 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1374 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1377 /* global register indices */
1378 static TCGv_env cpu_env
;
1379 static TCGv cpu_gpr
[32], cpu_PC
;
1380 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1381 static TCGv cpu_dspctrl
, btarget
, bcond
;
1382 static TCGv_i32 hflags
;
1383 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1384 static TCGv_i64 fpu_f64
[32];
1385 static TCGv_i64 msa_wr_d
[64];
1387 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers: box the trailing immediate argument into a
 * transient TCGv_i32, invoke the named helper with cpu_env as its first
 * argument, then free the temporary.  The suffix reads as
 * <ret-count>e<immediate-arg-position>i.
 * NOTE(review): the extraction dropped the "} while(0)" terminators;
 * restored here in the canonical QEMU form.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
1431 typedef struct DisasContext
{
1432 struct TranslationBlock
*tb
;
1433 target_ulong pc
, saved_pc
;
1435 int singlestep_enabled
;
1437 int32_t CP0_Config1
;
1438 /* Routine used to access memory */
1440 TCGMemOp default_tcg_memop_mask
;
1441 uint32_t hflags
, saved_hflags
;
1443 target_ulong btarget
;
1454 int CP0_LLAddr_shift
;
1464 BS_NONE
= 0, /* We go out of the TB without reaching a branch or an
1465 * exception condition */
1466 BS_STOP
= 1, /* We want to stop translation for any reason */
1467 BS_BRANCH
= 2, /* We reached a branch condition */
1468 BS_EXCP
= 3, /* We reached an exception condition */
/*
 * Symbolic names used when logging/disassembling, indexed by register
 * number.  GPR names follow the o32 ABI convention; each MSA register
 * is shown as two 64-bit halves (wN.d0 / wN.d1) matching msa_wr_d[].
 * NOTE(review): the extraction dropped the closing "};" of each array;
 * restored here.
 */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/*
 * Debug-logging macros, compiled out unless MIPS_DEBUG_DISAS is set.
 * MIPS_INVAL logs the PC, raw opcode, a caller-supplied mnemonic string
 * and the major/minor opcode fields of an invalid instruction.
 * NOTE(review): the extraction dropped the "do {"/"} while (0)" wrapper
 * lines; restored here in the canonical QEMU form.
 */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
1529 /* General purpose registers moves. */
1530 static inline void gen_load_gpr (TCGv t
, int reg
)
1533 tcg_gen_movi_tl(t
, 0);
1535 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1538 static inline void gen_store_gpr (TCGv t
, int reg
)
1541 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1544 /* Moves to/from shadow registers. */
1545 static inline void gen_load_srsgpr (int from
, int to
)
1547 TCGv t0
= tcg_temp_new();
1550 tcg_gen_movi_tl(t0
, 0);
1552 TCGv_i32 t2
= tcg_temp_new_i32();
1553 TCGv_ptr addr
= tcg_temp_new_ptr();
1555 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1556 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1557 tcg_gen_andi_i32(t2
, t2
, 0xf);
1558 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1559 tcg_gen_ext_i32_ptr(addr
, t2
);
1560 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1562 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1563 tcg_temp_free_ptr(addr
);
1564 tcg_temp_free_i32(t2
);
1566 gen_store_gpr(t0
, to
);
1570 static inline void gen_store_srsgpr (int from
, int to
)
1573 TCGv t0
= tcg_temp_new();
1574 TCGv_i32 t2
= tcg_temp_new_i32();
1575 TCGv_ptr addr
= tcg_temp_new_ptr();
1577 gen_load_gpr(t0
, from
);
1578 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1579 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1580 tcg_gen_andi_i32(t2
, t2
, 0xf);
1581 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1582 tcg_gen_ext_i32_ptr(addr
, t2
);
1583 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1585 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1586 tcg_temp_free_ptr(addr
);
1587 tcg_temp_free_i32(t2
);
1593 static inline void gen_save_pc(target_ulong pc
)
1595 tcg_gen_movi_tl(cpu_PC
, pc
);
1598 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1600 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1601 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
1602 gen_save_pc(ctx
->pc
);
1603 ctx
->saved_pc
= ctx
->pc
;
1605 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1606 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1607 ctx
->saved_hflags
= ctx
->hflags
;
1608 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1614 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1620 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1622 ctx
->saved_hflags
= ctx
->hflags
;
1623 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1629 ctx
->btarget
= env
->btarget
;
1634 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1636 TCGv_i32 texcp
= tcg_const_i32(excp
);
1637 TCGv_i32 terr
= tcg_const_i32(err
);
1638 save_cpu_state(ctx
, 1);
1639 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1640 tcg_temp_free_i32(terr
);
1641 tcg_temp_free_i32(texcp
);
1642 ctx
->bstate
= BS_EXCP
;
1645 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1647 gen_helper_0e0i(raise_exception
, excp
);
1650 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1652 generate_exception_err(ctx
, excp
, 0);
1655 /* Floating point register moves. */
1656 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1658 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1659 generate_exception(ctx
, EXCP_RI
);
1661 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1664 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1667 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1668 generate_exception(ctx
, EXCP_RI
);
1670 t64
= tcg_temp_new_i64();
1671 tcg_gen_extu_i32_i64(t64
, t
);
1672 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1673 tcg_temp_free_i64(t64
);
1676 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1678 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1679 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1681 gen_load_fpr32(ctx
, t
, reg
| 1);
1685 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1687 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1688 TCGv_i64 t64
= tcg_temp_new_i64();
1689 tcg_gen_extu_i32_i64(t64
, t
);
1690 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1691 tcg_temp_free_i64(t64
);
1693 gen_store_fpr32(ctx
, t
, reg
| 1);
1697 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1699 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1700 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1702 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1706 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1708 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1709 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1712 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1713 t0
= tcg_temp_new_i64();
1714 tcg_gen_shri_i64(t0
, t
, 32);
1715 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1716 tcg_temp_free_i64(t0
);
/*
 * Map FP condition code 'cc' to its bit position in FCSR: cc 0 is
 * bit 23, cc 1..7 are bits 25..31 (bit 24 is FCSR.FS).
 * NOTE(review): the function body was dropped entirely by the
 * extraction; reconstructed from the canonical QEMU implementation —
 * confirm against upstream.
 */
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
1728 /* Addresses computation */
1729 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1731 tcg_gen_add_tl(ret
, arg0
, arg1
);
1733 #if defined(TARGET_MIPS64)
1734 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1735 tcg_gen_ext32s_i64(ret
, ret
);
1740 /* Addresses computation (translation time) */
1741 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1744 target_long sum
= base
+ offset
;
1746 #if defined(TARGET_MIPS64)
1747 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1754 /* Sign-extract the low 32-bits to a target_long. */
1755 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1757 #if defined(TARGET_MIPS64)
1758 tcg_gen_ext32s_i64(ret
, arg
);
1760 tcg_gen_extrl_i64_i32(ret
, arg
);
1764 /* Sign-extract the high 32-bits to a target_long. */
1765 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1767 #if defined(TARGET_MIPS64)
1768 tcg_gen_sari_i64(ret
, arg
, 32);
1770 tcg_gen_extrh_i64_i32(ret
, arg
);
1774 static inline void check_cp0_enabled(DisasContext
*ctx
)
1776 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1777 generate_exception_err(ctx
, EXCP_CpU
, 0);
1780 static inline void check_cp1_enabled(DisasContext
*ctx
)
1782 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1783 generate_exception_err(ctx
, EXCP_CpU
, 1);
1786 /* Verify that the processor is running with COP1X instructions enabled.
1787 This is associated with the nabla symbol in the MIPS32 and MIPS64
1790 static inline void check_cop1x(DisasContext
*ctx
)
1792 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1793 generate_exception_end(ctx
, EXCP_RI
);
1796 /* Verify that the processor is running with 64-bit floating-point
1797 operations enabled. */
1799 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1801 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1802 generate_exception_end(ctx
, EXCP_RI
);
1806 * Verify if floating point register is valid; an operation is not defined
1807 * if bit 0 of any register specification is set and the FR bit in the
1808 * Status register equals zero, since the register numbers specify an
1809 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1810 * in the Status register equals one, both even and odd register numbers
1811 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1813 * Multiple 64 bit wide registers can be checked by calling
1814 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1816 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1818 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1819 generate_exception_end(ctx
, EXCP_RI
);
1822 /* Verify that the processor is running with DSP instructions enabled.
1823 This is enabled by CP0 Status register MX(24) bit.
1826 static inline void check_dsp(DisasContext
*ctx
)
1828 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1829 if (ctx
->insn_flags
& ASE_DSP
) {
1830 generate_exception_end(ctx
, EXCP_DSPDIS
);
1832 generate_exception_end(ctx
, EXCP_RI
);
1837 static inline void check_dspr2(DisasContext
*ctx
)
1839 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1840 if (ctx
->insn_flags
& ASE_DSP
) {
1841 generate_exception_end(ctx
, EXCP_DSPDIS
);
1843 generate_exception_end(ctx
, EXCP_RI
);
1848 /* This code generates a "reserved instruction" exception if the
1849 CPU does not support the instruction set corresponding to flags. */
1850 static inline void check_insn(DisasContext
*ctx
, int flags
)
1852 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1853 generate_exception_end(ctx
, EXCP_RI
);
1857 /* This code generates a "reserved instruction" exception if the
1858 CPU has corresponding flag set which indicates that the instruction
1859 has been removed. */
1860 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1862 if (unlikely(ctx
->insn_flags
& flags
)) {
1863 generate_exception_end(ctx
, EXCP_RI
);
1867 /* This code generates a "reserved instruction" exception if the
1868 CPU does not support 64-bit paired-single (PS) floating point data type */
1869 static inline void check_ps(DisasContext
*ctx
)
1871 if (unlikely(!ctx
->ps
)) {
1872 generate_exception(ctx
, EXCP_RI
);
1874 check_cp1_64bitmode(ctx
);
1877 #ifdef TARGET_MIPS64
1878 /* This code generates a "reserved instruction" exception if 64-bit
1879 instructions are not enabled. */
1880 static inline void check_mips_64(DisasContext
*ctx
)
1882 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
1883 generate_exception_end(ctx
, EXCP_RI
);
1887 #ifndef CONFIG_USER_ONLY
1888 static inline void check_mvh(DisasContext
*ctx
)
1890 if (unlikely(!ctx
->mvh
)) {
1891 generate_exception(ctx
, EXCP_RI
);
1896 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1897 calling interface for 32 and 64-bit FPRs. No sense in changing
1898 all callers for gen_load_fpr32 when we need the CTX parameter for
1900 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1901 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1902 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1903 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1904 int ft, int fs, int cc) \
1906 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1907 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1916 check_cp1_registers(ctx, fs | ft); \
1924 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1925 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1927 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1928 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1929 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1930 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1931 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1932 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1933 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1934 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1935 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1936 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1937 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1938 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1939 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1940 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1941 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1942 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1945 tcg_temp_free_i##bits (fp0); \
1946 tcg_temp_free_i##bits (fp1); \
1949 FOP_CONDS(, 0, d
, FMT_D
, 64)
1950 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1951 FOP_CONDS(, 0, s
, FMT_S
, 32)
1952 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1953 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1954 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1957 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1958 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1959 int ft, int fs, int fd) \
1961 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1962 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1963 if (ifmt == FMT_D) { \
1964 check_cp1_registers(ctx, fs | ft | fd); \
1966 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1967 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1970 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1973 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1976 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1979 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1982 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1985 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1988 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1991 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1994 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1997 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
2000 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
2003 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
2006 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
2009 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
2012 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
2015 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
2018 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2021 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2024 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2027 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2030 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2033 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2039 tcg_temp_free_i ## bits (fp0); \
2040 tcg_temp_free_i ## bits (fp1); \
2043 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2044 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2046 #undef gen_ldcmp_fpr32
2047 #undef gen_ldcmp_fpr64
2049 /* load/store instructions. */
2050 #ifdef CONFIG_USER_ONLY
2051 #define OP_LD_ATOMIC(insn,fname) \
2052 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2053 DisasContext *ctx) \
2055 TCGv t0 = tcg_temp_new(); \
2056 tcg_gen_mov_tl(t0, arg1); \
2057 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2058 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2059 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2060 tcg_temp_free(t0); \
2063 #define OP_LD_ATOMIC(insn,fname) \
2064 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2065 DisasContext *ctx) \
2067 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
2070 OP_LD_ATOMIC(ll
,ld32s
);
2071 #if defined(TARGET_MIPS64)
2072 OP_LD_ATOMIC(lld
,ld64
);
2076 #ifdef CONFIG_USER_ONLY
2077 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2078 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2079 DisasContext *ctx) \
2081 TCGv t0 = tcg_temp_new(); \
2082 TCGLabel *l1 = gen_new_label(); \
2083 TCGLabel *l2 = gen_new_label(); \
2085 tcg_gen_andi_tl(t0, arg2, almask); \
2086 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2087 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2088 generate_exception(ctx, EXCP_AdES); \
2089 gen_set_label(l1); \
2090 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2091 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2092 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2093 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2094 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2095 generate_exception_end(ctx, EXCP_SC); \
2096 gen_set_label(l2); \
2097 tcg_gen_movi_tl(t0, 0); \
2098 gen_store_gpr(t0, rt); \
2099 tcg_temp_free(t0); \
2102 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2103 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2104 DisasContext *ctx) \
2106 TCGv t0 = tcg_temp_new(); \
2107 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
2108 gen_store_gpr(t0, rt); \
2109 tcg_temp_free(t0); \
2112 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2113 #if defined(TARGET_MIPS64)
2114 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2118 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2119 int base
, int16_t offset
)
2122 tcg_gen_movi_tl(addr
, offset
);
2123 } else if (offset
== 0) {
2124 gen_load_gpr(addr
, base
);
2126 tcg_gen_movi_tl(addr
, offset
);
2127 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2131 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2133 target_ulong pc
= ctx
->pc
;
2135 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2136 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2141 pc
&= ~(target_ulong
)3;
2146 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2147 int rt
, int base
, int16_t offset
)
2150 int mem_idx
= ctx
->mem_idx
;
2152 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2153 /* Loongson CPU uses a load to zero register for prefetch.
2154 We emulate it as a NOP. On other CPU we must perform the
2155 actual memory access. */
2159 t0
= tcg_temp_new();
2160 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2163 #if defined(TARGET_MIPS64)
2165 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2166 ctx
->default_tcg_memop_mask
);
2167 gen_store_gpr(t0
, rt
);
2170 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2171 ctx
->default_tcg_memop_mask
);
2172 gen_store_gpr(t0
, rt
);
2176 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2177 gen_store_gpr(t0
, rt
);
2180 t1
= tcg_temp_new();
2181 /* Do a byte access to possibly trigger a page
2182 fault with the unaligned address. */
2183 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2184 tcg_gen_andi_tl(t1
, t0
, 7);
2185 #ifndef TARGET_WORDS_BIGENDIAN
2186 tcg_gen_xori_tl(t1
, t1
, 7);
2188 tcg_gen_shli_tl(t1
, t1
, 3);
2189 tcg_gen_andi_tl(t0
, t0
, ~7);
2190 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2191 tcg_gen_shl_tl(t0
, t0
, t1
);
2192 t2
= tcg_const_tl(-1);
2193 tcg_gen_shl_tl(t2
, t2
, t1
);
2194 gen_load_gpr(t1
, rt
);
2195 tcg_gen_andc_tl(t1
, t1
, t2
);
2197 tcg_gen_or_tl(t0
, t0
, t1
);
2199 gen_store_gpr(t0
, rt
);
2202 t1
= tcg_temp_new();
2203 /* Do a byte access to possibly trigger a page
2204 fault with the unaligned address. */
2205 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2206 tcg_gen_andi_tl(t1
, t0
, 7);
2207 #ifdef TARGET_WORDS_BIGENDIAN
2208 tcg_gen_xori_tl(t1
, t1
, 7);
2210 tcg_gen_shli_tl(t1
, t1
, 3);
2211 tcg_gen_andi_tl(t0
, t0
, ~7);
2212 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2213 tcg_gen_shr_tl(t0
, t0
, t1
);
2214 tcg_gen_xori_tl(t1
, t1
, 63);
2215 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2216 tcg_gen_shl_tl(t2
, t2
, t1
);
2217 gen_load_gpr(t1
, rt
);
2218 tcg_gen_and_tl(t1
, t1
, t2
);
2220 tcg_gen_or_tl(t0
, t0
, t1
);
2222 gen_store_gpr(t0
, rt
);
2225 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2226 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2228 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2229 gen_store_gpr(t0
, rt
);
2233 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2234 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2236 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2237 gen_store_gpr(t0
, rt
);
2240 mem_idx
= MIPS_HFLAG_UM
;
2243 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2244 ctx
->default_tcg_memop_mask
);
2245 gen_store_gpr(t0
, rt
);
2248 mem_idx
= MIPS_HFLAG_UM
;
2251 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2252 ctx
->default_tcg_memop_mask
);
2253 gen_store_gpr(t0
, rt
);
2256 mem_idx
= MIPS_HFLAG_UM
;
2259 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2260 ctx
->default_tcg_memop_mask
);
2261 gen_store_gpr(t0
, rt
);
2264 mem_idx
= MIPS_HFLAG_UM
;
2267 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2268 gen_store_gpr(t0
, rt
);
2271 mem_idx
= MIPS_HFLAG_UM
;
2274 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2275 gen_store_gpr(t0
, rt
);
2278 mem_idx
= MIPS_HFLAG_UM
;
2281 t1
= tcg_temp_new();
2282 /* Do a byte access to possibly trigger a page
2283 fault with the unaligned address. */
2284 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2285 tcg_gen_andi_tl(t1
, t0
, 3);
2286 #ifndef TARGET_WORDS_BIGENDIAN
2287 tcg_gen_xori_tl(t1
, t1
, 3);
2289 tcg_gen_shli_tl(t1
, t1
, 3);
2290 tcg_gen_andi_tl(t0
, t0
, ~3);
2291 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2292 tcg_gen_shl_tl(t0
, t0
, t1
);
2293 t2
= tcg_const_tl(-1);
2294 tcg_gen_shl_tl(t2
, t2
, t1
);
2295 gen_load_gpr(t1
, rt
);
2296 tcg_gen_andc_tl(t1
, t1
, t2
);
2298 tcg_gen_or_tl(t0
, t0
, t1
);
2300 tcg_gen_ext32s_tl(t0
, t0
);
2301 gen_store_gpr(t0
, rt
);
2304 mem_idx
= MIPS_HFLAG_UM
;
2307 t1
= tcg_temp_new();
2308 /* Do a byte access to possibly trigger a page
2309 fault with the unaligned address. */
2310 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2311 tcg_gen_andi_tl(t1
, t0
, 3);
2312 #ifdef TARGET_WORDS_BIGENDIAN
2313 tcg_gen_xori_tl(t1
, t1
, 3);
2315 tcg_gen_shli_tl(t1
, t1
, 3);
2316 tcg_gen_andi_tl(t0
, t0
, ~3);
2317 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2318 tcg_gen_shr_tl(t0
, t0
, t1
);
2319 tcg_gen_xori_tl(t1
, t1
, 31);
2320 t2
= tcg_const_tl(0xfffffffeull
);
2321 tcg_gen_shl_tl(t2
, t2
, t1
);
2322 gen_load_gpr(t1
, rt
);
2323 tcg_gen_and_tl(t1
, t1
, t2
);
2325 tcg_gen_or_tl(t0
, t0
, t1
);
2327 tcg_gen_ext32s_tl(t0
, t0
);
2328 gen_store_gpr(t0
, rt
);
2331 mem_idx
= MIPS_HFLAG_UM
;
2335 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2336 gen_store_gpr(t0
, rt
);
2343 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2344 int base
, int16_t offset
)
2346 TCGv t0
= tcg_temp_new();
2347 TCGv t1
= tcg_temp_new();
2348 int mem_idx
= ctx
->mem_idx
;
2350 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2351 gen_load_gpr(t1
, rt
);
2353 #if defined(TARGET_MIPS64)
2355 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
2356 ctx
->default_tcg_memop_mask
);
2359 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2362 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2366 mem_idx
= MIPS_HFLAG_UM
;
2369 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2370 ctx
->default_tcg_memop_mask
);
2373 mem_idx
= MIPS_HFLAG_UM
;
2376 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2377 ctx
->default_tcg_memop_mask
);
2380 mem_idx
= MIPS_HFLAG_UM
;
2383 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2386 mem_idx
= MIPS_HFLAG_UM
;
2389 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2392 mem_idx
= MIPS_HFLAG_UM
;
2395 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2403 /* Store conditional */
2404 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2405 int base
, int16_t offset
)
2408 int mem_idx
= ctx
->mem_idx
;
2410 #ifdef CONFIG_USER_ONLY
2411 t0
= tcg_temp_local_new();
2412 t1
= tcg_temp_local_new();
2414 t0
= tcg_temp_new();
2415 t1
= tcg_temp_new();
2417 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2418 gen_load_gpr(t1
, rt
);
2420 #if defined(TARGET_MIPS64)
2423 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
2427 mem_idx
= MIPS_HFLAG_UM
;
2431 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
2438 /* Load and store */
2439 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2440 int base
, int16_t offset
)
2442 TCGv t0
= tcg_temp_new();
2444 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2445 /* Don't do NOP if destination is zero: we must perform the actual
2450 TCGv_i32 fp0
= tcg_temp_new_i32();
2451 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2452 ctx
->default_tcg_memop_mask
);
2453 gen_store_fpr32(ctx
, fp0
, ft
);
2454 tcg_temp_free_i32(fp0
);
2459 TCGv_i32 fp0
= tcg_temp_new_i32();
2460 gen_load_fpr32(ctx
, fp0
, ft
);
2461 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2462 ctx
->default_tcg_memop_mask
);
2463 tcg_temp_free_i32(fp0
);
2468 TCGv_i64 fp0
= tcg_temp_new_i64();
2469 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2470 ctx
->default_tcg_memop_mask
);
2471 gen_store_fpr64(ctx
, fp0
, ft
);
2472 tcg_temp_free_i64(fp0
);
2477 TCGv_i64 fp0
= tcg_temp_new_i64();
2478 gen_load_fpr64(ctx
, fp0
, ft
);
2479 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2480 ctx
->default_tcg_memop_mask
);
2481 tcg_temp_free_i64(fp0
);
2485 MIPS_INVAL("flt_ldst");
2486 generate_exception_end(ctx
, EXCP_RI
);
2493 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2494 int rs
, int16_t imm
)
2496 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2497 check_cp1_enabled(ctx
);
2501 check_insn(ctx
, ISA_MIPS2
);
2504 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2507 generate_exception_err(ctx
, EXCP_CpU
, 1);
2511 /* Arithmetic with immediate operand */
2512 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2513 int rt
, int rs
, int16_t imm
)
2515 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2517 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2518 /* If no destination, treat it as a NOP.
2519 For addi, we must generate the overflow exception when needed. */
2525 TCGv t0
= tcg_temp_local_new();
2526 TCGv t1
= tcg_temp_new();
2527 TCGv t2
= tcg_temp_new();
2528 TCGLabel
*l1
= gen_new_label();
2530 gen_load_gpr(t1
, rs
);
2531 tcg_gen_addi_tl(t0
, t1
, uimm
);
2532 tcg_gen_ext32s_tl(t0
, t0
);
2534 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2535 tcg_gen_xori_tl(t2
, t0
, uimm
);
2536 tcg_gen_and_tl(t1
, t1
, t2
);
2538 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2540 /* operands of same sign, result different sign */
2541 generate_exception(ctx
, EXCP_OVERFLOW
);
2543 tcg_gen_ext32s_tl(t0
, t0
);
2544 gen_store_gpr(t0
, rt
);
2550 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2551 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2553 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2556 #if defined(TARGET_MIPS64)
2559 TCGv t0
= tcg_temp_local_new();
2560 TCGv t1
= tcg_temp_new();
2561 TCGv t2
= tcg_temp_new();
2562 TCGLabel
*l1
= gen_new_label();
2564 gen_load_gpr(t1
, rs
);
2565 tcg_gen_addi_tl(t0
, t1
, uimm
);
2567 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2568 tcg_gen_xori_tl(t2
, t0
, uimm
);
2569 tcg_gen_and_tl(t1
, t1
, t2
);
2571 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2573 /* operands of same sign, result different sign */
2574 generate_exception(ctx
, EXCP_OVERFLOW
);
2576 gen_store_gpr(t0
, rt
);
2582 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2584 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2591 /* Logic with immediate operand */
2592 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2593 int rt
, int rs
, int16_t imm
)
2598 /* If no destination, treat it as a NOP. */
2601 uimm
= (uint16_t)imm
;
2604 if (likely(rs
!= 0))
2605 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2607 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2611 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2613 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2616 if (likely(rs
!= 0))
2617 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2619 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2622 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2624 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2625 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2627 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2636 /* Set on less than with immediate operand */
2637 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2638 int rt
, int rs
, int16_t imm
)
2640 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2644 /* If no destination, treat it as a NOP. */
2647 t0
= tcg_temp_new();
2648 gen_load_gpr(t0
, rs
);
2651 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2654 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2660 /* Shifts with immediate operand */
2661 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2662 int rt
, int rs
, int16_t imm
)
2664 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2668 /* If no destination, treat it as a NOP. */
2672 t0
= tcg_temp_new();
2673 gen_load_gpr(t0
, rs
);
2676 tcg_gen_shli_tl(t0
, t0
, uimm
);
2677 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2680 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2684 tcg_gen_ext32u_tl(t0
, t0
);
2685 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2687 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2692 TCGv_i32 t1
= tcg_temp_new_i32();
2694 tcg_gen_trunc_tl_i32(t1
, t0
);
2695 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2696 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2697 tcg_temp_free_i32(t1
);
2699 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2702 #if defined(TARGET_MIPS64)
2704 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2707 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2710 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2714 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2716 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2720 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2723 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2726 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2729 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2737 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2738 int rd
, int rs
, int rt
)
2740 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2741 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2742 /* If no destination, treat it as a NOP.
2743 For add & sub, we must generate the overflow exception when needed. */
2750 TCGv t0
= tcg_temp_local_new();
2751 TCGv t1
= tcg_temp_new();
2752 TCGv t2
= tcg_temp_new();
2753 TCGLabel
*l1
= gen_new_label();
2755 gen_load_gpr(t1
, rs
);
2756 gen_load_gpr(t2
, rt
);
2757 tcg_gen_add_tl(t0
, t1
, t2
);
2758 tcg_gen_ext32s_tl(t0
, t0
);
2759 tcg_gen_xor_tl(t1
, t1
, t2
);
2760 tcg_gen_xor_tl(t2
, t0
, t2
);
2761 tcg_gen_andc_tl(t1
, t2
, t1
);
2763 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2765 /* operands of same sign, result different sign */
2766 generate_exception(ctx
, EXCP_OVERFLOW
);
2768 gen_store_gpr(t0
, rd
);
2773 if (rs
!= 0 && rt
!= 0) {
2774 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2775 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2776 } else if (rs
== 0 && rt
!= 0) {
2777 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2778 } else if (rs
!= 0 && rt
== 0) {
2779 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2781 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2786 TCGv t0
= tcg_temp_local_new();
2787 TCGv t1
= tcg_temp_new();
2788 TCGv t2
= tcg_temp_new();
2789 TCGLabel
*l1
= gen_new_label();
2791 gen_load_gpr(t1
, rs
);
2792 gen_load_gpr(t2
, rt
);
2793 tcg_gen_sub_tl(t0
, t1
, t2
);
2794 tcg_gen_ext32s_tl(t0
, t0
);
2795 tcg_gen_xor_tl(t2
, t1
, t2
);
2796 tcg_gen_xor_tl(t1
, t0
, t1
);
2797 tcg_gen_and_tl(t1
, t1
, t2
);
2799 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2801 /* operands of different sign, first operand and result different sign */
2802 generate_exception(ctx
, EXCP_OVERFLOW
);
2804 gen_store_gpr(t0
, rd
);
2809 if (rs
!= 0 && rt
!= 0) {
2810 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2811 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2812 } else if (rs
== 0 && rt
!= 0) {
2813 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2814 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2815 } else if (rs
!= 0 && rt
== 0) {
2816 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2818 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2821 #if defined(TARGET_MIPS64)
2824 TCGv t0
= tcg_temp_local_new();
2825 TCGv t1
= tcg_temp_new();
2826 TCGv t2
= tcg_temp_new();
2827 TCGLabel
*l1
= gen_new_label();
2829 gen_load_gpr(t1
, rs
);
2830 gen_load_gpr(t2
, rt
);
2831 tcg_gen_add_tl(t0
, t1
, t2
);
2832 tcg_gen_xor_tl(t1
, t1
, t2
);
2833 tcg_gen_xor_tl(t2
, t0
, t2
);
2834 tcg_gen_andc_tl(t1
, t2
, t1
);
2836 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2838 /* operands of same sign, result different sign */
2839 generate_exception(ctx
, EXCP_OVERFLOW
);
2841 gen_store_gpr(t0
, rd
);
2846 if (rs
!= 0 && rt
!= 0) {
2847 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2848 } else if (rs
== 0 && rt
!= 0) {
2849 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2850 } else if (rs
!= 0 && rt
== 0) {
2851 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2853 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2858 TCGv t0
= tcg_temp_local_new();
2859 TCGv t1
= tcg_temp_new();
2860 TCGv t2
= tcg_temp_new();
2861 TCGLabel
*l1
= gen_new_label();
2863 gen_load_gpr(t1
, rs
);
2864 gen_load_gpr(t2
, rt
);
2865 tcg_gen_sub_tl(t0
, t1
, t2
);
2866 tcg_gen_xor_tl(t2
, t1
, t2
);
2867 tcg_gen_xor_tl(t1
, t0
, t1
);
2868 tcg_gen_and_tl(t1
, t1
, t2
);
2870 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2872 /* operands of different sign, first operand and result different sign */
2873 generate_exception(ctx
, EXCP_OVERFLOW
);
2875 gen_store_gpr(t0
, rd
);
2880 if (rs
!= 0 && rt
!= 0) {
2881 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2882 } else if (rs
== 0 && rt
!= 0) {
2883 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2884 } else if (rs
!= 0 && rt
== 0) {
2885 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2887 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2892 if (likely(rs
!= 0 && rt
!= 0)) {
2893 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2894 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2896 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2902 /* Conditional move */
2903 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2904 int rd
, int rs
, int rt
)
2909 /* If no destination, treat it as a NOP. */
2913 t0
= tcg_temp_new();
2914 gen_load_gpr(t0
, rt
);
2915 t1
= tcg_const_tl(0);
2916 t2
= tcg_temp_new();
2917 gen_load_gpr(t2
, rs
);
2920 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2923 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2926 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2929 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2938 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2939 int rd
, int rs
, int rt
)
2942 /* If no destination, treat it as a NOP. */
2948 if (likely(rs
!= 0 && rt
!= 0)) {
2949 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2951 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2955 if (rs
!= 0 && rt
!= 0) {
2956 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2957 } else if (rs
== 0 && rt
!= 0) {
2958 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2959 } else if (rs
!= 0 && rt
== 0) {
2960 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2962 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2966 if (likely(rs
!= 0 && rt
!= 0)) {
2967 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2968 } else if (rs
== 0 && rt
!= 0) {
2969 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2970 } else if (rs
!= 0 && rt
== 0) {
2971 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2973 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2977 if (likely(rs
!= 0 && rt
!= 0)) {
2978 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2979 } else if (rs
== 0 && rt
!= 0) {
2980 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2981 } else if (rs
!= 0 && rt
== 0) {
2982 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2984 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2990 /* Set on lower than */
2991 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2992 int rd
, int rs
, int rt
)
2997 /* If no destination, treat it as a NOP. */
3001 t0
= tcg_temp_new();
3002 t1
= tcg_temp_new();
3003 gen_load_gpr(t0
, rs
);
3004 gen_load_gpr(t1
, rt
);
3007 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3010 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3018 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3019 int rd
, int rs
, int rt
)
3024 /* If no destination, treat it as a NOP.
3025 For add & sub, we must generate the overflow exception when needed. */
3029 t0
= tcg_temp_new();
3030 t1
= tcg_temp_new();
3031 gen_load_gpr(t0
, rs
);
3032 gen_load_gpr(t1
, rt
);
3035 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3036 tcg_gen_shl_tl(t0
, t1
, t0
);
3037 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3040 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3041 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3044 tcg_gen_ext32u_tl(t1
, t1
);
3045 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3046 tcg_gen_shr_tl(t0
, t1
, t0
);
3047 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3051 TCGv_i32 t2
= tcg_temp_new_i32();
3052 TCGv_i32 t3
= tcg_temp_new_i32();
3054 tcg_gen_trunc_tl_i32(t2
, t0
);
3055 tcg_gen_trunc_tl_i32(t3
, t1
);
3056 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3057 tcg_gen_rotr_i32(t2
, t3
, t2
);
3058 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3059 tcg_temp_free_i32(t2
);
3060 tcg_temp_free_i32(t3
);
3063 #if defined(TARGET_MIPS64)
3065 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3066 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3069 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3070 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3073 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3074 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3077 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3078 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3086 /* Arithmetic on HI/LO registers */
3087 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3089 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3100 #if defined(TARGET_MIPS64)
3102 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3106 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3110 #if defined(TARGET_MIPS64)
3112 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3116 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3121 #if defined(TARGET_MIPS64)
3123 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3127 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3130 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3135 #if defined(TARGET_MIPS64)
3137 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3141 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3144 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3150 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3153 TCGv t0
= tcg_const_tl(addr
);
3154 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3155 gen_store_gpr(t0
, reg
);
3159 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3165 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3168 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3169 addr
= addr_add(ctx
, pc
, offset
);
3170 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3174 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3175 addr
= addr_add(ctx
, pc
, offset
);
3176 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3178 #if defined(TARGET_MIPS64)
3181 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3182 addr
= addr_add(ctx
, pc
, offset
);
3183 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3187 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3190 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3191 addr
= addr_add(ctx
, pc
, offset
);
3192 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3197 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3198 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3199 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3202 #if defined(TARGET_MIPS64)
3203 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3204 case R6_OPC_LDPC
+ (1 << 16):
3205 case R6_OPC_LDPC
+ (2 << 16):
3206 case R6_OPC_LDPC
+ (3 << 16):
3208 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3209 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3210 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3214 MIPS_INVAL("OPC_PCREL");
3215 generate_exception_end(ctx
, EXCP_RI
);
3222 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3231 t0
= tcg_temp_new();
3232 t1
= tcg_temp_new();
3234 gen_load_gpr(t0
, rs
);
3235 gen_load_gpr(t1
, rt
);
3240 TCGv t2
= tcg_temp_new();
3241 TCGv t3
= tcg_temp_new();
3242 tcg_gen_ext32s_tl(t0
, t0
);
3243 tcg_gen_ext32s_tl(t1
, t1
);
3244 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3245 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3246 tcg_gen_and_tl(t2
, t2
, t3
);
3247 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3248 tcg_gen_or_tl(t2
, t2
, t3
);
3249 tcg_gen_movi_tl(t3
, 0);
3250 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3251 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3252 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3259 TCGv t2
= tcg_temp_new();
3260 TCGv t3
= tcg_temp_new();
3261 tcg_gen_ext32s_tl(t0
, t0
);
3262 tcg_gen_ext32s_tl(t1
, t1
);
3263 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3264 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3265 tcg_gen_and_tl(t2
, t2
, t3
);
3266 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3267 tcg_gen_or_tl(t2
, t2
, t3
);
3268 tcg_gen_movi_tl(t3
, 0);
3269 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3270 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3271 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3278 TCGv t2
= tcg_const_tl(0);
3279 TCGv t3
= tcg_const_tl(1);
3280 tcg_gen_ext32u_tl(t0
, t0
);
3281 tcg_gen_ext32u_tl(t1
, t1
);
3282 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3283 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3284 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3291 TCGv t2
= tcg_const_tl(0);
3292 TCGv t3
= tcg_const_tl(1);
3293 tcg_gen_ext32u_tl(t0
, t0
);
3294 tcg_gen_ext32u_tl(t1
, t1
);
3295 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3296 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3297 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3304 TCGv_i32 t2
= tcg_temp_new_i32();
3305 TCGv_i32 t3
= tcg_temp_new_i32();
3306 tcg_gen_trunc_tl_i32(t2
, t0
);
3307 tcg_gen_trunc_tl_i32(t3
, t1
);
3308 tcg_gen_mul_i32(t2
, t2
, t3
);
3309 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3310 tcg_temp_free_i32(t2
);
3311 tcg_temp_free_i32(t3
);
3316 TCGv_i32 t2
= tcg_temp_new_i32();
3317 TCGv_i32 t3
= tcg_temp_new_i32();
3318 tcg_gen_trunc_tl_i32(t2
, t0
);
3319 tcg_gen_trunc_tl_i32(t3
, t1
);
3320 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3321 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3322 tcg_temp_free_i32(t2
);
3323 tcg_temp_free_i32(t3
);
3328 TCGv_i32 t2
= tcg_temp_new_i32();
3329 TCGv_i32 t3
= tcg_temp_new_i32();
3330 tcg_gen_trunc_tl_i32(t2
, t0
);
3331 tcg_gen_trunc_tl_i32(t3
, t1
);
3332 tcg_gen_mul_i32(t2
, t2
, t3
);
3333 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3334 tcg_temp_free_i32(t2
);
3335 tcg_temp_free_i32(t3
);
3340 TCGv_i32 t2
= tcg_temp_new_i32();
3341 TCGv_i32 t3
= tcg_temp_new_i32();
3342 tcg_gen_trunc_tl_i32(t2
, t0
);
3343 tcg_gen_trunc_tl_i32(t3
, t1
);
3344 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3345 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3346 tcg_temp_free_i32(t2
);
3347 tcg_temp_free_i32(t3
);
3350 #if defined(TARGET_MIPS64)
3353 TCGv t2
= tcg_temp_new();
3354 TCGv t3
= tcg_temp_new();
3355 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3356 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3357 tcg_gen_and_tl(t2
, t2
, t3
);
3358 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3359 tcg_gen_or_tl(t2
, t2
, t3
);
3360 tcg_gen_movi_tl(t3
, 0);
3361 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3362 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3369 TCGv t2
= tcg_temp_new();
3370 TCGv t3
= tcg_temp_new();
3371 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3372 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3373 tcg_gen_and_tl(t2
, t2
, t3
);
3374 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3375 tcg_gen_or_tl(t2
, t2
, t3
);
3376 tcg_gen_movi_tl(t3
, 0);
3377 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3378 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3385 TCGv t2
= tcg_const_tl(0);
3386 TCGv t3
= tcg_const_tl(1);
3387 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3388 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3395 TCGv t2
= tcg_const_tl(0);
3396 TCGv t3
= tcg_const_tl(1);
3397 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3398 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3404 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3408 TCGv t2
= tcg_temp_new();
3409 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3414 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3418 TCGv t2
= tcg_temp_new();
3419 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3425 MIPS_INVAL("r6 mul/div");
3426 generate_exception_end(ctx
, EXCP_RI
);
3434 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3435 int acc
, int rs
, int rt
)
3439 t0
= tcg_temp_new();
3440 t1
= tcg_temp_new();
3442 gen_load_gpr(t0
, rs
);
3443 gen_load_gpr(t1
, rt
);
3452 TCGv t2
= tcg_temp_new();
3453 TCGv t3
= tcg_temp_new();
3454 tcg_gen_ext32s_tl(t0
, t0
);
3455 tcg_gen_ext32s_tl(t1
, t1
);
3456 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3457 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3458 tcg_gen_and_tl(t2
, t2
, t3
);
3459 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3460 tcg_gen_or_tl(t2
, t2
, t3
);
3461 tcg_gen_movi_tl(t3
, 0);
3462 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3463 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3464 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3465 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3466 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3473 TCGv t2
= tcg_const_tl(0);
3474 TCGv t3
= tcg_const_tl(1);
3475 tcg_gen_ext32u_tl(t0
, t0
);
3476 tcg_gen_ext32u_tl(t1
, t1
);
3477 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3478 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3479 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3480 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3481 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3488 TCGv_i32 t2
= tcg_temp_new_i32();
3489 TCGv_i32 t3
= tcg_temp_new_i32();
3490 tcg_gen_trunc_tl_i32(t2
, t0
);
3491 tcg_gen_trunc_tl_i32(t3
, t1
);
3492 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3493 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3494 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3495 tcg_temp_free_i32(t2
);
3496 tcg_temp_free_i32(t3
);
3501 TCGv_i32 t2
= tcg_temp_new_i32();
3502 TCGv_i32 t3
= tcg_temp_new_i32();
3503 tcg_gen_trunc_tl_i32(t2
, t0
);
3504 tcg_gen_trunc_tl_i32(t3
, t1
);
3505 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3506 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3507 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3508 tcg_temp_free_i32(t2
);
3509 tcg_temp_free_i32(t3
);
3512 #if defined(TARGET_MIPS64)
3515 TCGv t2
= tcg_temp_new();
3516 TCGv t3
= tcg_temp_new();
3517 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3518 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3519 tcg_gen_and_tl(t2
, t2
, t3
);
3520 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3521 tcg_gen_or_tl(t2
, t2
, t3
);
3522 tcg_gen_movi_tl(t3
, 0);
3523 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3524 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3525 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3532 TCGv t2
= tcg_const_tl(0);
3533 TCGv t3
= tcg_const_tl(1);
3534 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3535 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3536 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3542 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3545 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3550 TCGv_i64 t2
= tcg_temp_new_i64();
3551 TCGv_i64 t3
= tcg_temp_new_i64();
3553 tcg_gen_ext_tl_i64(t2
, t0
);
3554 tcg_gen_ext_tl_i64(t3
, t1
);
3555 tcg_gen_mul_i64(t2
, t2
, t3
);
3556 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3557 tcg_gen_add_i64(t2
, t2
, t3
);
3558 tcg_temp_free_i64(t3
);
3559 gen_move_low32(cpu_LO
[acc
], t2
);
3560 gen_move_high32(cpu_HI
[acc
], t2
);
3561 tcg_temp_free_i64(t2
);
3566 TCGv_i64 t2
= tcg_temp_new_i64();
3567 TCGv_i64 t3
= tcg_temp_new_i64();
3569 tcg_gen_ext32u_tl(t0
, t0
);
3570 tcg_gen_ext32u_tl(t1
, t1
);
3571 tcg_gen_extu_tl_i64(t2
, t0
);
3572 tcg_gen_extu_tl_i64(t3
, t1
);
3573 tcg_gen_mul_i64(t2
, t2
, t3
);
3574 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3575 tcg_gen_add_i64(t2
, t2
, t3
);
3576 tcg_temp_free_i64(t3
);
3577 gen_move_low32(cpu_LO
[acc
], t2
);
3578 gen_move_high32(cpu_HI
[acc
], t2
);
3579 tcg_temp_free_i64(t2
);
3584 TCGv_i64 t2
= tcg_temp_new_i64();
3585 TCGv_i64 t3
= tcg_temp_new_i64();
3587 tcg_gen_ext_tl_i64(t2
, t0
);
3588 tcg_gen_ext_tl_i64(t3
, t1
);
3589 tcg_gen_mul_i64(t2
, t2
, t3
);
3590 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3591 tcg_gen_sub_i64(t2
, t3
, t2
);
3592 tcg_temp_free_i64(t3
);
3593 gen_move_low32(cpu_LO
[acc
], t2
);
3594 gen_move_high32(cpu_HI
[acc
], t2
);
3595 tcg_temp_free_i64(t2
);
3600 TCGv_i64 t2
= tcg_temp_new_i64();
3601 TCGv_i64 t3
= tcg_temp_new_i64();
3603 tcg_gen_ext32u_tl(t0
, t0
);
3604 tcg_gen_ext32u_tl(t1
, t1
);
3605 tcg_gen_extu_tl_i64(t2
, t0
);
3606 tcg_gen_extu_tl_i64(t3
, t1
);
3607 tcg_gen_mul_i64(t2
, t2
, t3
);
3608 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3609 tcg_gen_sub_i64(t2
, t3
, t2
);
3610 tcg_temp_free_i64(t3
);
3611 gen_move_low32(cpu_LO
[acc
], t2
);
3612 gen_move_high32(cpu_HI
[acc
], t2
);
3613 tcg_temp_free_i64(t2
);
3617 MIPS_INVAL("mul/div");
3618 generate_exception_end(ctx
, EXCP_RI
);
3626 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3627 int rd
, int rs
, int rt
)
3629 TCGv t0
= tcg_temp_new();
3630 TCGv t1
= tcg_temp_new();
3632 gen_load_gpr(t0
, rs
);
3633 gen_load_gpr(t1
, rt
);
3636 case OPC_VR54XX_MULS
:
3637 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3639 case OPC_VR54XX_MULSU
:
3640 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3642 case OPC_VR54XX_MACC
:
3643 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3645 case OPC_VR54XX_MACCU
:
3646 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3648 case OPC_VR54XX_MSAC
:
3649 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3651 case OPC_VR54XX_MSACU
:
3652 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3654 case OPC_VR54XX_MULHI
:
3655 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3657 case OPC_VR54XX_MULHIU
:
3658 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3660 case OPC_VR54XX_MULSHI
:
3661 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3663 case OPC_VR54XX_MULSHIU
:
3664 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3666 case OPC_VR54XX_MACCHI
:
3667 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3669 case OPC_VR54XX_MACCHIU
:
3670 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3672 case OPC_VR54XX_MSACHI
:
3673 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3675 case OPC_VR54XX_MSACHIU
:
3676 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3679 MIPS_INVAL("mul vr54xx");
3680 generate_exception_end(ctx
, EXCP_RI
);
3683 gen_store_gpr(t0
, rd
);
3690 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3700 gen_load_gpr(t0
, rs
);
3705 #if defined(TARGET_MIPS64)
3709 tcg_gen_not_tl(t0
, t0
);
3718 tcg_gen_ext32u_tl(t0
, t0
);
3719 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3720 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3722 #if defined(TARGET_MIPS64)
3727 tcg_gen_clzi_i64(t0
, t0
, 64);
3733 /* Godson integer instructions */
3734 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3735 int rd
, int rs
, int rt
)
3747 case OPC_MULTU_G_2E
:
3748 case OPC_MULTU_G_2F
:
3749 #if defined(TARGET_MIPS64)
3750 case OPC_DMULT_G_2E
:
3751 case OPC_DMULT_G_2F
:
3752 case OPC_DMULTU_G_2E
:
3753 case OPC_DMULTU_G_2F
:
3755 t0
= tcg_temp_new();
3756 t1
= tcg_temp_new();
3759 t0
= tcg_temp_local_new();
3760 t1
= tcg_temp_local_new();
3764 gen_load_gpr(t0
, rs
);
3765 gen_load_gpr(t1
, rt
);
3770 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3771 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3773 case OPC_MULTU_G_2E
:
3774 case OPC_MULTU_G_2F
:
3775 tcg_gen_ext32u_tl(t0
, t0
);
3776 tcg_gen_ext32u_tl(t1
, t1
);
3777 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3778 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3783 TCGLabel
*l1
= gen_new_label();
3784 TCGLabel
*l2
= gen_new_label();
3785 TCGLabel
*l3
= gen_new_label();
3786 tcg_gen_ext32s_tl(t0
, t0
);
3787 tcg_gen_ext32s_tl(t1
, t1
);
3788 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3789 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3792 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3793 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3794 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3797 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3798 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3805 TCGLabel
*l1
= gen_new_label();
3806 TCGLabel
*l2
= gen_new_label();
3807 tcg_gen_ext32u_tl(t0
, t0
);
3808 tcg_gen_ext32u_tl(t1
, t1
);
3809 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3810 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3813 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3814 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3821 TCGLabel
*l1
= gen_new_label();
3822 TCGLabel
*l2
= gen_new_label();
3823 TCGLabel
*l3
= gen_new_label();
3824 tcg_gen_ext32u_tl(t0
, t0
);
3825 tcg_gen_ext32u_tl(t1
, t1
);
3826 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3827 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3828 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3830 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3833 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3834 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3841 TCGLabel
*l1
= gen_new_label();
3842 TCGLabel
*l2
= gen_new_label();
3843 tcg_gen_ext32u_tl(t0
, t0
);
3844 tcg_gen_ext32u_tl(t1
, t1
);
3845 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3846 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3849 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3850 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3854 #if defined(TARGET_MIPS64)
3855 case OPC_DMULT_G_2E
:
3856 case OPC_DMULT_G_2F
:
3857 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3859 case OPC_DMULTU_G_2E
:
3860 case OPC_DMULTU_G_2F
:
3861 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3866 TCGLabel
*l1
= gen_new_label();
3867 TCGLabel
*l2
= gen_new_label();
3868 TCGLabel
*l3
= gen_new_label();
3869 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3870 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3873 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3874 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3875 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3878 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3882 case OPC_DDIVU_G_2E
:
3883 case OPC_DDIVU_G_2F
:
3885 TCGLabel
*l1
= gen_new_label();
3886 TCGLabel
*l2
= gen_new_label();
3887 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3888 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3891 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3898 TCGLabel
*l1
= gen_new_label();
3899 TCGLabel
*l2
= gen_new_label();
3900 TCGLabel
*l3
= gen_new_label();
3901 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3902 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3903 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3905 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3908 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3912 case OPC_DMODU_G_2E
:
3913 case OPC_DMODU_G_2F
:
3915 TCGLabel
*l1
= gen_new_label();
3916 TCGLabel
*l2
= gen_new_label();
3917 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3918 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3921 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3932 /* Loongson multimedia instructions */
3933 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3935 uint32_t opc
, shift_max
;
3938 opc
= MASK_LMI(ctx
->opcode
);
3944 t0
= tcg_temp_local_new_i64();
3945 t1
= tcg_temp_local_new_i64();
3948 t0
= tcg_temp_new_i64();
3949 t1
= tcg_temp_new_i64();
3953 check_cp1_enabled(ctx
);
3954 gen_load_fpr64(ctx
, t0
, rs
);
3955 gen_load_fpr64(ctx
, t1
, rt
);
3957 #define LMI_HELPER(UP, LO) \
3958 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3959 #define LMI_HELPER_1(UP, LO) \
3960 case OPC_##UP: gen_helper_##LO(t0, t0); break
3961 #define LMI_DIRECT(UP, LO, OP) \
3962 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3965 LMI_HELPER(PADDSH
, paddsh
);
3966 LMI_HELPER(PADDUSH
, paddush
);
3967 LMI_HELPER(PADDH
, paddh
);
3968 LMI_HELPER(PADDW
, paddw
);
3969 LMI_HELPER(PADDSB
, paddsb
);
3970 LMI_HELPER(PADDUSB
, paddusb
);
3971 LMI_HELPER(PADDB
, paddb
);
3973 LMI_HELPER(PSUBSH
, psubsh
);
3974 LMI_HELPER(PSUBUSH
, psubush
);
3975 LMI_HELPER(PSUBH
, psubh
);
3976 LMI_HELPER(PSUBW
, psubw
);
3977 LMI_HELPER(PSUBSB
, psubsb
);
3978 LMI_HELPER(PSUBUSB
, psubusb
);
3979 LMI_HELPER(PSUBB
, psubb
);
3981 LMI_HELPER(PSHUFH
, pshufh
);
3982 LMI_HELPER(PACKSSWH
, packsswh
);
3983 LMI_HELPER(PACKSSHB
, packsshb
);
3984 LMI_HELPER(PACKUSHB
, packushb
);
3986 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3987 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3988 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3989 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3990 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3991 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3993 LMI_HELPER(PAVGH
, pavgh
);
3994 LMI_HELPER(PAVGB
, pavgb
);
3995 LMI_HELPER(PMAXSH
, pmaxsh
);
3996 LMI_HELPER(PMINSH
, pminsh
);
3997 LMI_HELPER(PMAXUB
, pmaxub
);
3998 LMI_HELPER(PMINUB
, pminub
);
4000 LMI_HELPER(PCMPEQW
, pcmpeqw
);
4001 LMI_HELPER(PCMPGTW
, pcmpgtw
);
4002 LMI_HELPER(PCMPEQH
, pcmpeqh
);
4003 LMI_HELPER(PCMPGTH
, pcmpgth
);
4004 LMI_HELPER(PCMPEQB
, pcmpeqb
);
4005 LMI_HELPER(PCMPGTB
, pcmpgtb
);
4007 LMI_HELPER(PSLLW
, psllw
);
4008 LMI_HELPER(PSLLH
, psllh
);
4009 LMI_HELPER(PSRLW
, psrlw
);
4010 LMI_HELPER(PSRLH
, psrlh
);
4011 LMI_HELPER(PSRAW
, psraw
);
4012 LMI_HELPER(PSRAH
, psrah
);
4014 LMI_HELPER(PMULLH
, pmullh
);
4015 LMI_HELPER(PMULHH
, pmulhh
);
4016 LMI_HELPER(PMULHUH
, pmulhuh
);
4017 LMI_HELPER(PMADDHW
, pmaddhw
);
4019 LMI_HELPER(PASUBUB
, pasubub
);
4020 LMI_HELPER_1(BIADD
, biadd
);
4021 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
4023 LMI_DIRECT(PADDD
, paddd
, add
);
4024 LMI_DIRECT(PSUBD
, psubd
, sub
);
4025 LMI_DIRECT(XOR_CP2
, xor, xor);
4026 LMI_DIRECT(NOR_CP2
, nor
, nor
);
4027 LMI_DIRECT(AND_CP2
, and, and);
4028 LMI_DIRECT(OR_CP2
, or, or);
4031 tcg_gen_andc_i64(t0
, t1
, t0
);
4035 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
4038 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
4041 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
4044 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
4048 tcg_gen_andi_i64(t1
, t1
, 3);
4049 tcg_gen_shli_i64(t1
, t1
, 4);
4050 tcg_gen_shr_i64(t0
, t0
, t1
);
4051 tcg_gen_ext16u_i64(t0
, t0
);
4055 tcg_gen_add_i64(t0
, t0
, t1
);
4056 tcg_gen_ext32s_i64(t0
, t0
);
4059 tcg_gen_sub_i64(t0
, t0
, t1
);
4060 tcg_gen_ext32s_i64(t0
, t0
);
4082 /* Make sure shift count isn't TCG undefined behaviour. */
4083 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4088 tcg_gen_shl_i64(t0
, t0
, t1
);
4092 /* Since SRA is UndefinedResult without sign-extended inputs,
4093 we can treat SRA and DSRA the same. */
4094 tcg_gen_sar_i64(t0
, t0
, t1
);
4097 /* We want to shift in zeros for SRL; zero-extend first. */
4098 tcg_gen_ext32u_i64(t0
, t0
);
4101 tcg_gen_shr_i64(t0
, t0
, t1
);
4105 if (shift_max
== 32) {
4106 tcg_gen_ext32s_i64(t0
, t0
);
4109 /* Shifts larger than MAX produce zero. */
4110 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4111 tcg_gen_neg_i64(t1
, t1
);
4112 tcg_gen_and_i64(t0
, t0
, t1
);
4118 TCGv_i64 t2
= tcg_temp_new_i64();
4119 TCGLabel
*lab
= gen_new_label();
4121 tcg_gen_mov_i64(t2
, t0
);
4122 tcg_gen_add_i64(t0
, t1
, t2
);
4123 if (opc
== OPC_ADD_CP2
) {
4124 tcg_gen_ext32s_i64(t0
, t0
);
4126 tcg_gen_xor_i64(t1
, t1
, t2
);
4127 tcg_gen_xor_i64(t2
, t2
, t0
);
4128 tcg_gen_andc_i64(t1
, t2
, t1
);
4129 tcg_temp_free_i64(t2
);
4130 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4131 generate_exception(ctx
, EXCP_OVERFLOW
);
4139 TCGv_i64 t2
= tcg_temp_new_i64();
4140 TCGLabel
*lab
= gen_new_label();
4142 tcg_gen_mov_i64(t2
, t0
);
4143 tcg_gen_sub_i64(t0
, t1
, t2
);
4144 if (opc
== OPC_SUB_CP2
) {
4145 tcg_gen_ext32s_i64(t0
, t0
);
4147 tcg_gen_xor_i64(t1
, t1
, t2
);
4148 tcg_gen_xor_i64(t2
, t2
, t0
);
4149 tcg_gen_and_i64(t1
, t1
, t2
);
4150 tcg_temp_free_i64(t2
);
4151 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4152 generate_exception(ctx
, EXCP_OVERFLOW
);
4158 tcg_gen_ext32u_i64(t0
, t0
);
4159 tcg_gen_ext32u_i64(t1
, t1
);
4160 tcg_gen_mul_i64(t0
, t0
, t1
);
4169 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4170 FD field is the CC field? */
4172 MIPS_INVAL("loongson_cp2");
4173 generate_exception_end(ctx
, EXCP_RI
);
4180 gen_store_fpr64(ctx
, t0
, rd
);
4182 tcg_temp_free_i64(t0
);
4183 tcg_temp_free_i64(t1
);
4187 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4188 int rs
, int rt
, int16_t imm
)
4191 TCGv t0
= tcg_temp_new();
4192 TCGv t1
= tcg_temp_new();
4195 /* Load needed operands */
4203 /* Compare two registers */
4205 gen_load_gpr(t0
, rs
);
4206 gen_load_gpr(t1
, rt
);
4216 /* Compare register to immediate */
4217 if (rs
!= 0 || imm
!= 0) {
4218 gen_load_gpr(t0
, rs
);
4219 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4226 case OPC_TEQ
: /* rs == rs */
4227 case OPC_TEQI
: /* r0 == 0 */
4228 case OPC_TGE
: /* rs >= rs */
4229 case OPC_TGEI
: /* r0 >= 0 */
4230 case OPC_TGEU
: /* rs >= rs unsigned */
4231 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4233 generate_exception_end(ctx
, EXCP_TRAP
);
4235 case OPC_TLT
: /* rs < rs */
4236 case OPC_TLTI
: /* r0 < 0 */
4237 case OPC_TLTU
: /* rs < rs unsigned */
4238 case OPC_TLTIU
: /* r0 < 0 unsigned */
4239 case OPC_TNE
: /* rs != rs */
4240 case OPC_TNEI
: /* r0 != 0 */
4241 /* Never trap: treat as NOP. */
4245 TCGLabel
*l1
= gen_new_label();
4250 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4254 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4258 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4262 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4266 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4270 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4273 generate_exception(ctx
, EXCP_TRAP
);
4280 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
4282 if (unlikely(ctx
->singlestep_enabled
)) {
4286 #ifndef CONFIG_USER_ONLY
4287 return (ctx
->tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
4293 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4295 if (use_goto_tb(ctx
, dest
)) {
4298 tcg_gen_exit_tb((uintptr_t)ctx
->tb
+ n
);
4301 if (ctx
->singlestep_enabled
) {
4302 save_cpu_state(ctx
, 0);
4303 gen_helper_raise_exception_debug(cpu_env
);
4305 tcg_gen_lookup_and_goto_ptr(cpu_PC
);
4309 /* Branches (before delay slot) */
4310 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4312 int rs
, int rt
, int32_t offset
,
4315 target_ulong btgt
= -1;
4317 int bcond_compute
= 0;
4318 TCGv t0
= tcg_temp_new();
4319 TCGv t1
= tcg_temp_new();
4321 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4322 #ifdef MIPS_DEBUG_DISAS
4323 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4324 TARGET_FMT_lx
"\n", ctx
->pc
);
4326 generate_exception_end(ctx
, EXCP_RI
);
4330 /* Load needed operands */
4336 /* Compare two registers */
4338 gen_load_gpr(t0
, rs
);
4339 gen_load_gpr(t1
, rt
);
4342 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4356 /* Compare to zero */
4358 gen_load_gpr(t0
, rs
);
4361 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4364 #if defined(TARGET_MIPS64)
4366 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4368 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4371 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4376 /* Jump to immediate */
4377 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4381 /* Jump to register */
4382 if (offset
!= 0 && offset
!= 16) {
4383 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4384 others are reserved. */
4385 MIPS_INVAL("jump hint");
4386 generate_exception_end(ctx
, EXCP_RI
);
4389 gen_load_gpr(btarget
, rs
);
4392 MIPS_INVAL("branch/jump");
4393 generate_exception_end(ctx
, EXCP_RI
);
4396 if (bcond_compute
== 0) {
4397 /* No condition to be computed */
4399 case OPC_BEQ
: /* rx == rx */
4400 case OPC_BEQL
: /* rx == rx likely */
4401 case OPC_BGEZ
: /* 0 >= 0 */
4402 case OPC_BGEZL
: /* 0 >= 0 likely */
4403 case OPC_BLEZ
: /* 0 <= 0 */
4404 case OPC_BLEZL
: /* 0 <= 0 likely */
4406 ctx
->hflags
|= MIPS_HFLAG_B
;
4408 case OPC_BGEZAL
: /* 0 >= 0 */
4409 case OPC_BGEZALL
: /* 0 >= 0 likely */
4410 /* Always take and link */
4412 ctx
->hflags
|= MIPS_HFLAG_B
;
4414 case OPC_BNE
: /* rx != rx */
4415 case OPC_BGTZ
: /* 0 > 0 */
4416 case OPC_BLTZ
: /* 0 < 0 */
4419 case OPC_BLTZAL
: /* 0 < 0 */
4420 /* Handle as an unconditional branch to get correct delay
4423 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4424 ctx
->hflags
|= MIPS_HFLAG_B
;
4426 case OPC_BLTZALL
: /* 0 < 0 likely */
4427 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4428 /* Skip the instruction in the delay slot */
4431 case OPC_BNEL
: /* rx != rx likely */
4432 case OPC_BGTZL
: /* 0 > 0 likely */
4433 case OPC_BLTZL
: /* 0 < 0 likely */
4434 /* Skip the instruction in the delay slot */
4438 ctx
->hflags
|= MIPS_HFLAG_B
;
4441 ctx
->hflags
|= MIPS_HFLAG_BX
;
4445 ctx
->hflags
|= MIPS_HFLAG_B
;
4448 ctx
->hflags
|= MIPS_HFLAG_BR
;
4452 ctx
->hflags
|= MIPS_HFLAG_BR
;
4455 MIPS_INVAL("branch/jump");
4456 generate_exception_end(ctx
, EXCP_RI
);
4462 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4465 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4468 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4471 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4474 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4477 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4480 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4484 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4488 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4491 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4494 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4497 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4500 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4503 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4506 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4508 #if defined(TARGET_MIPS64)
4510 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4514 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4517 ctx
->hflags
|= MIPS_HFLAG_BC
;
4520 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4523 ctx
->hflags
|= MIPS_HFLAG_BL
;
4526 MIPS_INVAL("conditional branch/jump");
4527 generate_exception_end(ctx
, EXCP_RI
);
4532 ctx
->btarget
= btgt
;
4534 switch (delayslot_size
) {
4536 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4539 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4544 int post_delay
= insn_bytes
+ delayslot_size
;
4545 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4547 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4551 if (insn_bytes
== 2)
4552 ctx
->hflags
|= MIPS_HFLAG_B16
;
4557 /* special3 bitfield operations */
4558 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4559 int rs
, int lsb
, int msb
)
4561 TCGv t0
= tcg_temp_new();
4562 TCGv t1
= tcg_temp_new();
4564 gen_load_gpr(t1
, rs
);
4567 if (lsb
+ msb
> 31) {
4571 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4573 /* The two checks together imply that lsb == 0,
4574 so this is a simple sign-extension. */
4575 tcg_gen_ext32s_tl(t0
, t1
);
4578 #if defined(TARGET_MIPS64)
4587 if (lsb
+ msb
> 63) {
4590 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4597 gen_load_gpr(t0
, rt
);
4598 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4599 tcg_gen_ext32s_tl(t0
, t0
);
4601 #if defined(TARGET_MIPS64)
4612 gen_load_gpr(t0
, rt
);
4613 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4618 MIPS_INVAL("bitops");
4619 generate_exception_end(ctx
, EXCP_RI
);
4624 gen_store_gpr(t0
, rt
);
4629 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4634 /* If no destination, treat it as a NOP. */
4638 t0
= tcg_temp_new();
4639 gen_load_gpr(t0
, rt
);
4643 TCGv t1
= tcg_temp_new();
4644 TCGv t2
= tcg_const_tl(0x00FF00FF);
4646 tcg_gen_shri_tl(t1
, t0
, 8);
4647 tcg_gen_and_tl(t1
, t1
, t2
);
4648 tcg_gen_and_tl(t0
, t0
, t2
);
4649 tcg_gen_shli_tl(t0
, t0
, 8);
4650 tcg_gen_or_tl(t0
, t0
, t1
);
4653 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4657 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4660 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4662 #if defined(TARGET_MIPS64)
4665 TCGv t1
= tcg_temp_new();
4666 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
4668 tcg_gen_shri_tl(t1
, t0
, 8);
4669 tcg_gen_and_tl(t1
, t1
, t2
);
4670 tcg_gen_and_tl(t0
, t0
, t2
);
4671 tcg_gen_shli_tl(t0
, t0
, 8);
4672 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4679 TCGv t1
= tcg_temp_new();
4680 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
4682 tcg_gen_shri_tl(t1
, t0
, 16);
4683 tcg_gen_and_tl(t1
, t1
, t2
);
4684 tcg_gen_and_tl(t0
, t0
, t2
);
4685 tcg_gen_shli_tl(t0
, t0
, 16);
4686 tcg_gen_or_tl(t0
, t0
, t1
);
4687 tcg_gen_shri_tl(t1
, t0
, 32);
4688 tcg_gen_shli_tl(t0
, t0
, 32);
4689 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4696 MIPS_INVAL("bsfhl");
4697 generate_exception_end(ctx
, EXCP_RI
);
4704 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4713 t0
= tcg_temp_new();
4714 t1
= tcg_temp_new();
4715 gen_load_gpr(t0
, rs
);
4716 gen_load_gpr(t1
, rt
);
4717 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4718 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4719 if (opc
== OPC_LSA
) {
4720 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4729 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4737 t0
= tcg_temp_new();
4738 gen_load_gpr(t0
, rt
);
4742 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4744 #if defined(TARGET_MIPS64)
4746 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4751 TCGv t1
= tcg_temp_new();
4752 gen_load_gpr(t1
, rs
);
4756 TCGv_i64 t2
= tcg_temp_new_i64();
4757 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4758 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4759 gen_move_low32(cpu_gpr
[rd
], t2
);
4760 tcg_temp_free_i64(t2
);
4763 #if defined(TARGET_MIPS64)
4765 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4766 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4767 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4777 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4784 t0
= tcg_temp_new();
4785 gen_load_gpr(t0
, rt
);
4788 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4790 #if defined(TARGET_MIPS64)
4792 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4799 #ifndef CONFIG_USER_ONLY
4800 /* CP0 (MMU and control) */
4801 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4803 TCGv_i64 t0
= tcg_temp_new_i64();
4804 TCGv_i64 t1
= tcg_temp_new_i64();
4806 tcg_gen_ext_tl_i64(t0
, arg
);
4807 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4808 #if defined(TARGET_MIPS64)
4809 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4811 tcg_gen_concat32_i64(t1
, t1
, t0
);
4813 tcg_gen_st_i64(t1
, cpu_env
, off
);
4814 tcg_temp_free_i64(t1
);
4815 tcg_temp_free_i64(t0
);
4818 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4820 TCGv_i64 t0
= tcg_temp_new_i64();
4821 TCGv_i64 t1
= tcg_temp_new_i64();
4823 tcg_gen_ext_tl_i64(t0
, arg
);
4824 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4825 tcg_gen_concat32_i64(t1
, t1
, t0
);
4826 tcg_gen_st_i64(t1
, cpu_env
, off
);
4827 tcg_temp_free_i64(t1
);
4828 tcg_temp_free_i64(t0
);
4831 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4833 TCGv_i64 t0
= tcg_temp_new_i64();
4835 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4836 #if defined(TARGET_MIPS64)
4837 tcg_gen_shri_i64(t0
, t0
, 30);
4839 tcg_gen_shri_i64(t0
, t0
, 32);
4841 gen_move_low32(arg
, t0
);
4842 tcg_temp_free_i64(t0
);
4845 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4847 TCGv_i64 t0
= tcg_temp_new_i64();
4849 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4850 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4851 gen_move_low32(arg
, t0
);
4852 tcg_temp_free_i64(t0
);
4855 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4857 TCGv_i32 t0
= tcg_temp_new_i32();
4859 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4860 tcg_gen_ext_i32_tl(arg
, t0
);
4861 tcg_temp_free_i32(t0
);
4864 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4866 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4867 tcg_gen_ext32s_tl(arg
, arg
);
4870 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4872 TCGv_i32 t0
= tcg_temp_new_i32();
4874 tcg_gen_trunc_tl_i32(t0
, arg
);
4875 tcg_gen_st_i32(t0
, cpu_env
, off
);
4876 tcg_temp_free_i32(t0
);
/*
 * Guard for optional CP0 features inside the gen_m[ft](h)c0 switch
 * bodies: if condition 'c' does not hold, bail out to the function's
 * cp0_unimplemented label. do/while(0) makes it statement-safe.
 * NOTE(review): the do/if scaffolding lines were dropped by the
 * extraction and are reconstructed from the visible usage pattern.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4886 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4888 const char *rn
= "invalid";
4890 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4896 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4900 goto cp0_unimplemented
;
4906 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4910 goto cp0_unimplemented
;
4916 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4917 ctx
->CP0_LLAddr_shift
);
4921 CP0_CHECK(ctx
->mrp
);
4922 gen_helper_mfhc0_maar(arg
, cpu_env
);
4926 goto cp0_unimplemented
;
4935 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4939 goto cp0_unimplemented
;
4943 goto cp0_unimplemented
;
4945 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
4949 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4950 tcg_gen_movi_tl(arg
, 0);
4953 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4955 const char *rn
= "invalid";
4956 uint64_t mask
= ctx
->PAMask
>> 36;
4958 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4964 tcg_gen_andi_tl(arg
, arg
, mask
);
4965 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4969 goto cp0_unimplemented
;
4975 tcg_gen_andi_tl(arg
, arg
, mask
);
4976 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4980 goto cp0_unimplemented
;
4986 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4987 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4988 relevant for modern MIPS cores supporting MTHC0, therefore
4989 treating MTHC0 to LLAddr as NOP. */
4993 CP0_CHECK(ctx
->mrp
);
4994 gen_helper_mthc0_maar(cpu_env
, arg
);
4998 goto cp0_unimplemented
;
5007 tcg_gen_andi_tl(arg
, arg
, mask
);
5008 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5012 goto cp0_unimplemented
;
5016 goto cp0_unimplemented
;
5018 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
5021 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5024 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5026 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
5027 tcg_gen_movi_tl(arg
, 0);
5029 tcg_gen_movi_tl(arg
, ~0);
5033 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5035 const char *rn
= "invalid";
5038 check_insn(ctx
, ISA_MIPS32
);
5044 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5048 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5049 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5053 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5054 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5058 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5059 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5064 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5068 goto cp0_unimplemented
;
5074 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5075 gen_helper_mfc0_random(arg
, cpu_env
);
5079 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5080 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5084 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5085 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5089 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5090 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5094 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5095 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5099 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5100 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5104 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5105 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5106 rn
= "VPEScheFBack";
5109 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5110 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5114 goto cp0_unimplemented
;
5121 TCGv_i64 tmp
= tcg_temp_new_i64();
5122 tcg_gen_ld_i64(tmp
, cpu_env
,
5123 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5124 #if defined(TARGET_MIPS64)
5126 /* Move RI/XI fields to bits 31:30 */
5127 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5128 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5131 gen_move_low32(arg
, tmp
);
5132 tcg_temp_free_i64(tmp
);
5137 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5138 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5142 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5143 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5147 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5148 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5152 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5153 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5157 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5158 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5162 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5163 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5167 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5168 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5172 goto cp0_unimplemented
;
5179 TCGv_i64 tmp
= tcg_temp_new_i64();
5180 tcg_gen_ld_i64(tmp
, cpu_env
,
5181 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5182 #if defined(TARGET_MIPS64)
5184 /* Move RI/XI fields to bits 31:30 */
5185 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5186 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5189 gen_move_low32(arg
, tmp
);
5190 tcg_temp_free_i64(tmp
);
5196 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5197 rn
= "GlobalNumber";
5200 goto cp0_unimplemented
;
5206 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5207 tcg_gen_ext32s_tl(arg
, arg
);
5211 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5212 rn
= "ContextConfig";
5213 goto cp0_unimplemented
;
5215 CP0_CHECK(ctx
->ulri
);
5216 tcg_gen_ld_tl(arg
, cpu_env
,
5217 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5218 tcg_gen_ext32s_tl(arg
, arg
);
5222 goto cp0_unimplemented
;
5228 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5232 check_insn(ctx
, ISA_MIPS32R2
);
5233 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5238 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5239 tcg_gen_ext32s_tl(arg
, arg
);
5244 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5245 tcg_gen_ext32s_tl(arg
, arg
);
5250 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5251 tcg_gen_ext32s_tl(arg
, arg
);
5255 goto cp0_unimplemented
;
5261 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5265 check_insn(ctx
, ISA_MIPS32R2
);
5266 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5270 check_insn(ctx
, ISA_MIPS32R2
);
5271 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5275 check_insn(ctx
, ISA_MIPS32R2
);
5276 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5280 check_insn(ctx
, ISA_MIPS32R2
);
5281 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5285 check_insn(ctx
, ISA_MIPS32R2
);
5286 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5290 goto cp0_unimplemented
;
5296 check_insn(ctx
, ISA_MIPS32R2
);
5297 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5301 goto cp0_unimplemented
;
5307 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5308 tcg_gen_ext32s_tl(arg
, arg
);
5313 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5318 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5322 goto cp0_unimplemented
;
5328 /* Mark as an IO operation because we read the time. */
5329 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5332 gen_helper_mfc0_count(arg
, cpu_env
);
5333 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5336 /* Break the TB to be able to take timer interrupts immediately
5337 after reading count. */
5338 ctx
->bstate
= BS_STOP
;
5341 /* 6,7 are implementation dependent */
5343 goto cp0_unimplemented
;
5349 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5350 tcg_gen_ext32s_tl(arg
, arg
);
5354 goto cp0_unimplemented
;
5360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5363 /* 6,7 are implementation dependent */
5365 goto cp0_unimplemented
;
5371 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5375 check_insn(ctx
, ISA_MIPS32R2
);
5376 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5380 check_insn(ctx
, ISA_MIPS32R2
);
5381 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5385 check_insn(ctx
, ISA_MIPS32R2
);
5386 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5390 goto cp0_unimplemented
;
5396 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5400 goto cp0_unimplemented
;
5406 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5407 tcg_gen_ext32s_tl(arg
, arg
);
5411 goto cp0_unimplemented
;
5417 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5421 check_insn(ctx
, ISA_MIPS32R2
);
5422 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
5423 tcg_gen_ext32s_tl(arg
, arg
);
5427 check_insn(ctx
, ISA_MIPS32R2
);
5428 CP0_CHECK(ctx
->cmgcr
);
5429 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5430 tcg_gen_ext32s_tl(arg
, arg
);
5434 goto cp0_unimplemented
;
5440 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5444 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5448 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5452 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5456 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5460 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5463 /* 6,7 are implementation dependent */
5465 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5469 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5473 goto cp0_unimplemented
;
5479 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5483 CP0_CHECK(ctx
->mrp
);
5484 gen_helper_mfc0_maar(arg
, cpu_env
);
5488 CP0_CHECK(ctx
->mrp
);
5489 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5493 goto cp0_unimplemented
;
5499 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5503 goto cp0_unimplemented
;
5509 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5513 goto cp0_unimplemented
;
5519 #if defined(TARGET_MIPS64)
5520 check_insn(ctx
, ISA_MIPS3
);
5521 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5522 tcg_gen_ext32s_tl(arg
, arg
);
5527 goto cp0_unimplemented
;
5531 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5532 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5535 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5539 goto cp0_unimplemented
;
5543 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5544 rn
= "'Diagnostic"; /* implementation dependent */
5549 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5553 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5554 rn
= "TraceControl";
5555 goto cp0_unimplemented
;
5557 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5558 rn
= "TraceControl2";
5559 goto cp0_unimplemented
;
5561 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5562 rn
= "UserTraceData";
5563 goto cp0_unimplemented
;
5565 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5567 goto cp0_unimplemented
;
5569 goto cp0_unimplemented
;
5576 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5577 tcg_gen_ext32s_tl(arg
, arg
);
5581 goto cp0_unimplemented
;
5587 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5588 rn
= "Performance0";
5591 // gen_helper_mfc0_performance1(arg);
5592 rn
= "Performance1";
5593 goto cp0_unimplemented
;
5595 // gen_helper_mfc0_performance2(arg);
5596 rn
= "Performance2";
5597 goto cp0_unimplemented
;
5599 // gen_helper_mfc0_performance3(arg);
5600 rn
= "Performance3";
5601 goto cp0_unimplemented
;
5603 // gen_helper_mfc0_performance4(arg);
5604 rn
= "Performance4";
5605 goto cp0_unimplemented
;
5607 // gen_helper_mfc0_performance5(arg);
5608 rn
= "Performance5";
5609 goto cp0_unimplemented
;
5611 // gen_helper_mfc0_performance6(arg);
5612 rn
= "Performance6";
5613 goto cp0_unimplemented
;
5615 // gen_helper_mfc0_performance7(arg);
5616 rn
= "Performance7";
5617 goto cp0_unimplemented
;
5619 goto cp0_unimplemented
;
5625 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5629 goto cp0_unimplemented
;
5635 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5639 goto cp0_unimplemented
;
5649 TCGv_i64 tmp
= tcg_temp_new_i64();
5650 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5651 gen_move_low32(arg
, tmp
);
5652 tcg_temp_free_i64(tmp
);
5660 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5664 goto cp0_unimplemented
;
5673 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5680 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5684 goto cp0_unimplemented
;
5690 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5691 tcg_gen_ext32s_tl(arg
, arg
);
5695 goto cp0_unimplemented
;
5702 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5706 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5707 tcg_gen_ld_tl(arg
, cpu_env
,
5708 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5709 tcg_gen_ext32s_tl(arg
, arg
);
5713 goto cp0_unimplemented
;
5717 goto cp0_unimplemented
;
5719 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
5723 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5724 gen_mfc0_unimplemented(ctx
, arg
);
5727 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5729 const char *rn
= "invalid";
5732 check_insn(ctx
, ISA_MIPS32
);
5734 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5742 gen_helper_mtc0_index(cpu_env
, arg
);
5746 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5747 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5751 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5756 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5766 goto cp0_unimplemented
;
5776 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5777 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5781 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5782 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5786 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5787 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5791 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5792 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5796 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5797 tcg_gen_st_tl(arg
, cpu_env
,
5798 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5802 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5803 tcg_gen_st_tl(arg
, cpu_env
,
5804 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5805 rn
= "VPEScheFBack";
5808 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5809 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5813 goto cp0_unimplemented
;
5819 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5823 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5824 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5828 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5829 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5833 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5834 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5838 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5839 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5843 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5844 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5848 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5849 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5853 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5854 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5858 goto cp0_unimplemented
;
5864 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5870 rn
= "GlobalNumber";
5873 goto cp0_unimplemented
;
5879 gen_helper_mtc0_context(cpu_env
, arg
);
5883 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5884 rn
= "ContextConfig";
5885 goto cp0_unimplemented
;
5887 CP0_CHECK(ctx
->ulri
);
5888 tcg_gen_st_tl(arg
, cpu_env
,
5889 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5893 goto cp0_unimplemented
;
5899 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5903 check_insn(ctx
, ISA_MIPS32R2
);
5904 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5906 ctx
->bstate
= BS_STOP
;
5910 gen_helper_mtc0_segctl0(cpu_env
, arg
);
5915 gen_helper_mtc0_segctl1(cpu_env
, arg
);
5920 gen_helper_mtc0_segctl2(cpu_env
, arg
);
5924 goto cp0_unimplemented
;
5930 gen_helper_mtc0_wired(cpu_env
, arg
);
5934 check_insn(ctx
, ISA_MIPS32R2
);
5935 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5939 check_insn(ctx
, ISA_MIPS32R2
);
5940 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5944 check_insn(ctx
, ISA_MIPS32R2
);
5945 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5949 check_insn(ctx
, ISA_MIPS32R2
);
5950 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5954 check_insn(ctx
, ISA_MIPS32R2
);
5955 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5959 goto cp0_unimplemented
;
5965 check_insn(ctx
, ISA_MIPS32R2
);
5966 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5967 ctx
->bstate
= BS_STOP
;
5971 goto cp0_unimplemented
;
5989 goto cp0_unimplemented
;
5995 gen_helper_mtc0_count(cpu_env
, arg
);
5998 /* 6,7 are implementation dependent */
6000 goto cp0_unimplemented
;
6006 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6010 goto cp0_unimplemented
;
6016 gen_helper_mtc0_compare(cpu_env
, arg
);
6019 /* 6,7 are implementation dependent */
6021 goto cp0_unimplemented
;
6027 save_cpu_state(ctx
, 1);
6028 gen_helper_mtc0_status(cpu_env
, arg
);
6029 /* BS_STOP isn't good enough here, hflags may have changed. */
6030 gen_save_pc(ctx
->pc
+ 4);
6031 ctx
->bstate
= BS_EXCP
;
6035 check_insn(ctx
, ISA_MIPS32R2
);
6036 gen_helper_mtc0_intctl(cpu_env
, arg
);
6037 /* Stop translation as we may have switched the execution mode */
6038 ctx
->bstate
= BS_STOP
;
6042 check_insn(ctx
, ISA_MIPS32R2
);
6043 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6044 /* Stop translation as we may have switched the execution mode */
6045 ctx
->bstate
= BS_STOP
;
6049 check_insn(ctx
, ISA_MIPS32R2
);
6050 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6051 /* Stop translation as we may have switched the execution mode */
6052 ctx
->bstate
= BS_STOP
;
6056 goto cp0_unimplemented
;
6062 save_cpu_state(ctx
, 1);
6063 gen_helper_mtc0_cause(cpu_env
, arg
);
6067 goto cp0_unimplemented
;
6073 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6077 goto cp0_unimplemented
;
6087 check_insn(ctx
, ISA_MIPS32R2
);
6088 gen_helper_mtc0_ebase(cpu_env
, arg
);
6092 goto cp0_unimplemented
;
6098 gen_helper_mtc0_config0(cpu_env
, arg
);
6100 /* Stop translation as we may have switched the execution mode */
6101 ctx
->bstate
= BS_STOP
;
6104 /* ignored, read only */
6108 gen_helper_mtc0_config2(cpu_env
, arg
);
6110 /* Stop translation as we may have switched the execution mode */
6111 ctx
->bstate
= BS_STOP
;
6114 gen_helper_mtc0_config3(cpu_env
, arg
);
6116 /* Stop translation as we may have switched the execution mode */
6117 ctx
->bstate
= BS_STOP
;
6120 gen_helper_mtc0_config4(cpu_env
, arg
);
6122 ctx
->bstate
= BS_STOP
;
6125 gen_helper_mtc0_config5(cpu_env
, arg
);
6127 /* Stop translation as we may have switched the execution mode */
6128 ctx
->bstate
= BS_STOP
;
6130 /* 6,7 are implementation dependent */
6140 rn
= "Invalid config selector";
6141 goto cp0_unimplemented
;
6147 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6151 CP0_CHECK(ctx
->mrp
);
6152 gen_helper_mtc0_maar(cpu_env
, arg
);
6156 CP0_CHECK(ctx
->mrp
);
6157 gen_helper_mtc0_maari(cpu_env
, arg
);
6161 goto cp0_unimplemented
;
6167 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6171 goto cp0_unimplemented
;
6177 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6181 goto cp0_unimplemented
;
6187 #if defined(TARGET_MIPS64)
6188 check_insn(ctx
, ISA_MIPS3
);
6189 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6194 goto cp0_unimplemented
;
6198 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6199 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6202 gen_helper_mtc0_framemask(cpu_env
, arg
);
6206 goto cp0_unimplemented
;
6211 rn
= "Diagnostic"; /* implementation dependent */
6216 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6217 /* BS_STOP isn't good enough here, hflags may have changed. */
6218 gen_save_pc(ctx
->pc
+ 4);
6219 ctx
->bstate
= BS_EXCP
;
6223 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6224 rn
= "TraceControl";
6225 /* Stop translation as we may have switched the execution mode */
6226 ctx
->bstate
= BS_STOP
;
6227 goto cp0_unimplemented
;
6229 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6230 rn
= "TraceControl2";
6231 /* Stop translation as we may have switched the execution mode */
6232 ctx
->bstate
= BS_STOP
;
6233 goto cp0_unimplemented
;
6235 /* Stop translation as we may have switched the execution mode */
6236 ctx
->bstate
= BS_STOP
;
6237 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6238 rn
= "UserTraceData";
6239 /* Stop translation as we may have switched the execution mode */
6240 ctx
->bstate
= BS_STOP
;
6241 goto cp0_unimplemented
;
6243 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6244 /* Stop translation as we may have switched the execution mode */
6245 ctx
->bstate
= BS_STOP
;
6247 goto cp0_unimplemented
;
6249 goto cp0_unimplemented
;
6256 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6260 goto cp0_unimplemented
;
6266 gen_helper_mtc0_performance0(cpu_env
, arg
);
6267 rn
= "Performance0";
6270 // gen_helper_mtc0_performance1(arg);
6271 rn
= "Performance1";
6272 goto cp0_unimplemented
;
6274 // gen_helper_mtc0_performance2(arg);
6275 rn
= "Performance2";
6276 goto cp0_unimplemented
;
6278 // gen_helper_mtc0_performance3(arg);
6279 rn
= "Performance3";
6280 goto cp0_unimplemented
;
6282 // gen_helper_mtc0_performance4(arg);
6283 rn
= "Performance4";
6284 goto cp0_unimplemented
;
6286 // gen_helper_mtc0_performance5(arg);
6287 rn
= "Performance5";
6288 goto cp0_unimplemented
;
6290 // gen_helper_mtc0_performance6(arg);
6291 rn
= "Performance6";
6292 goto cp0_unimplemented
;
6294 // gen_helper_mtc0_performance7(arg);
6295 rn
= "Performance7";
6296 goto cp0_unimplemented
;
6298 goto cp0_unimplemented
;
6304 gen_helper_mtc0_errctl(cpu_env
, arg
);
6305 ctx
->bstate
= BS_STOP
;
6309 goto cp0_unimplemented
;
6319 goto cp0_unimplemented
;
6328 gen_helper_mtc0_taglo(cpu_env
, arg
);
6335 gen_helper_mtc0_datalo(cpu_env
, arg
);
6339 goto cp0_unimplemented
;
6348 gen_helper_mtc0_taghi(cpu_env
, arg
);
6355 gen_helper_mtc0_datahi(cpu_env
, arg
);
6360 goto cp0_unimplemented
;
6366 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6370 goto cp0_unimplemented
;
6377 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6381 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6382 tcg_gen_st_tl(arg
, cpu_env
,
6383 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6387 goto cp0_unimplemented
;
6391 goto cp0_unimplemented
;
6393 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
6395 /* For simplicity assume that all writes can cause interrupts. */
6396 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6398 ctx
->bstate
= BS_STOP
;
6403 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6406 #if defined(TARGET_MIPS64)
6407 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6409 const char *rn
= "invalid";
6412 check_insn(ctx
, ISA_MIPS64
);
6418 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6422 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6423 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6427 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6428 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6432 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6433 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6438 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6442 goto cp0_unimplemented
;
6448 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6449 gen_helper_mfc0_random(arg
, cpu_env
);
6453 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6454 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6458 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6459 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6463 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6464 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6468 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6469 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6473 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6474 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6478 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6479 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6480 rn
= "VPEScheFBack";
6483 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6484 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6488 goto cp0_unimplemented
;
6494 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6498 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6499 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6503 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6504 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6508 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6509 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6513 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6514 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6518 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6519 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6523 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6524 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6528 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6529 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6533 goto cp0_unimplemented
;
6539 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6544 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6545 rn
= "GlobalNumber";
6548 goto cp0_unimplemented
;
6554 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6558 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6559 rn
= "ContextConfig";
6560 goto cp0_unimplemented
;
6562 CP0_CHECK(ctx
->ulri
);
6563 tcg_gen_ld_tl(arg
, cpu_env
,
6564 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6568 goto cp0_unimplemented
;
6574 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6578 check_insn(ctx
, ISA_MIPS32R2
);
6579 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6584 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6589 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6594 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6598 goto cp0_unimplemented
;
6604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6608 check_insn(ctx
, ISA_MIPS32R2
);
6609 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6613 check_insn(ctx
, ISA_MIPS32R2
);
6614 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6618 check_insn(ctx
, ISA_MIPS32R2
);
6619 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6623 check_insn(ctx
, ISA_MIPS32R2
);
6624 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6628 check_insn(ctx
, ISA_MIPS32R2
);
6629 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6633 goto cp0_unimplemented
;
6639 check_insn(ctx
, ISA_MIPS32R2
);
6640 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6644 goto cp0_unimplemented
;
6650 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6655 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6660 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6664 goto cp0_unimplemented
;
6670 /* Mark as an IO operation because we read the time. */
6671 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6674 gen_helper_mfc0_count(arg
, cpu_env
);
6675 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6678 /* Break the TB to be able to take timer interrupts immediately
6679 after reading count. */
6680 ctx
->bstate
= BS_STOP
;
6683 /* 6,7 are implementation dependent */
6685 goto cp0_unimplemented
;
6691 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6695 goto cp0_unimplemented
;
6701 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6704 /* 6,7 are implementation dependent */
6706 goto cp0_unimplemented
;
6712 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6716 check_insn(ctx
, ISA_MIPS32R2
);
6717 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6721 check_insn(ctx
, ISA_MIPS32R2
);
6722 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6726 check_insn(ctx
, ISA_MIPS32R2
);
6727 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6731 goto cp0_unimplemented
;
6737 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6741 goto cp0_unimplemented
;
6747 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6751 goto cp0_unimplemented
;
6757 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6761 check_insn(ctx
, ISA_MIPS32R2
);
6762 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6766 check_insn(ctx
, ISA_MIPS32R2
);
6767 CP0_CHECK(ctx
->cmgcr
);
6768 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6772 goto cp0_unimplemented
;
6778 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6782 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6786 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6790 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6794 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6798 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6801 /* 6,7 are implementation dependent */
6803 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6807 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6811 goto cp0_unimplemented
;
6817 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6821 CP0_CHECK(ctx
->mrp
);
6822 gen_helper_dmfc0_maar(arg
, cpu_env
);
6826 CP0_CHECK(ctx
->mrp
);
6827 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6831 goto cp0_unimplemented
;
6837 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6841 goto cp0_unimplemented
;
6847 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6851 goto cp0_unimplemented
;
6857 check_insn(ctx
, ISA_MIPS3
);
6858 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6862 goto cp0_unimplemented
;
6866 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6867 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6870 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6874 goto cp0_unimplemented
;
6878 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6879 rn
= "'Diagnostic"; /* implementation dependent */
6884 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6888 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6889 rn
= "TraceControl";
6890 goto cp0_unimplemented
;
6892 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6893 rn
= "TraceControl2";
6894 goto cp0_unimplemented
;
6896 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6897 rn
= "UserTraceData";
6898 goto cp0_unimplemented
;
6900 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6902 goto cp0_unimplemented
;
6904 goto cp0_unimplemented
;
6911 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6915 goto cp0_unimplemented
;
6921 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6922 rn
= "Performance0";
6925 // gen_helper_dmfc0_performance1(arg);
6926 rn
= "Performance1";
6927 goto cp0_unimplemented
;
6929 // gen_helper_dmfc0_performance2(arg);
6930 rn
= "Performance2";
6931 goto cp0_unimplemented
;
6933 // gen_helper_dmfc0_performance3(arg);
6934 rn
= "Performance3";
6935 goto cp0_unimplemented
;
6937 // gen_helper_dmfc0_performance4(arg);
6938 rn
= "Performance4";
6939 goto cp0_unimplemented
;
6941 // gen_helper_dmfc0_performance5(arg);
6942 rn
= "Performance5";
6943 goto cp0_unimplemented
;
6945 // gen_helper_dmfc0_performance6(arg);
6946 rn
= "Performance6";
6947 goto cp0_unimplemented
;
6949 // gen_helper_dmfc0_performance7(arg);
6950 rn
= "Performance7";
6951 goto cp0_unimplemented
;
6953 goto cp0_unimplemented
;
6959 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6963 goto cp0_unimplemented
;
6970 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6974 goto cp0_unimplemented
;
6983 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6990 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6994 goto cp0_unimplemented
;
7003 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7010 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7014 goto cp0_unimplemented
;
7020 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7024 goto cp0_unimplemented
;
7031 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7035 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7036 tcg_gen_ld_tl(arg
, cpu_env
,
7037 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7041 goto cp0_unimplemented
;
7045 goto cp0_unimplemented
;
7047 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
7051 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7052 gen_mfc0_unimplemented(ctx
, arg
);
7055 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7057 const char *rn
= "invalid";
7060 check_insn(ctx
, ISA_MIPS64
);
7062 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7070 gen_helper_mtc0_index(cpu_env
, arg
);
7074 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7075 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7079 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7084 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7094 goto cp0_unimplemented
;
7104 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7105 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7109 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7110 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7114 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7115 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7119 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7120 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7124 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7125 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7129 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7130 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7131 rn
= "VPEScheFBack";
7134 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7135 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7139 goto cp0_unimplemented
;
7145 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7149 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7150 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7154 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7155 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7159 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7160 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7164 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7165 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7169 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7170 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7174 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7175 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7179 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7180 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7184 goto cp0_unimplemented
;
7190 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7196 rn
= "GlobalNumber";
7199 goto cp0_unimplemented
;
7205 gen_helper_mtc0_context(cpu_env
, arg
);
7209 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7210 rn
= "ContextConfig";
7211 goto cp0_unimplemented
;
7213 CP0_CHECK(ctx
->ulri
);
7214 tcg_gen_st_tl(arg
, cpu_env
,
7215 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7219 goto cp0_unimplemented
;
7225 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7229 check_insn(ctx
, ISA_MIPS32R2
);
7230 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7235 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7240 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7245 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7249 goto cp0_unimplemented
;
7255 gen_helper_mtc0_wired(cpu_env
, arg
);
7259 check_insn(ctx
, ISA_MIPS32R2
);
7260 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7264 check_insn(ctx
, ISA_MIPS32R2
);
7265 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7269 check_insn(ctx
, ISA_MIPS32R2
);
7270 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7274 check_insn(ctx
, ISA_MIPS32R2
);
7275 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7279 check_insn(ctx
, ISA_MIPS32R2
);
7280 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7284 goto cp0_unimplemented
;
7290 check_insn(ctx
, ISA_MIPS32R2
);
7291 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7292 ctx
->bstate
= BS_STOP
;
7296 goto cp0_unimplemented
;
7314 goto cp0_unimplemented
;
7320 gen_helper_mtc0_count(cpu_env
, arg
);
7323 /* 6,7 are implementation dependent */
7325 goto cp0_unimplemented
;
7327 /* Stop translation as we may have switched the execution mode */
7328 ctx
->bstate
= BS_STOP
;
7333 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7337 goto cp0_unimplemented
;
7343 gen_helper_mtc0_compare(cpu_env
, arg
);
7346 /* 6,7 are implementation dependent */
7348 goto cp0_unimplemented
;
7350 /* Stop translation as we may have switched the execution mode */
7351 ctx
->bstate
= BS_STOP
;
7356 save_cpu_state(ctx
, 1);
7357 gen_helper_mtc0_status(cpu_env
, arg
);
7358 /* BS_STOP isn't good enough here, hflags may have changed. */
7359 gen_save_pc(ctx
->pc
+ 4);
7360 ctx
->bstate
= BS_EXCP
;
7364 check_insn(ctx
, ISA_MIPS32R2
);
7365 gen_helper_mtc0_intctl(cpu_env
, arg
);
7366 /* Stop translation as we may have switched the execution mode */
7367 ctx
->bstate
= BS_STOP
;
7371 check_insn(ctx
, ISA_MIPS32R2
);
7372 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7373 /* Stop translation as we may have switched the execution mode */
7374 ctx
->bstate
= BS_STOP
;
7378 check_insn(ctx
, ISA_MIPS32R2
);
7379 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7380 /* Stop translation as we may have switched the execution mode */
7381 ctx
->bstate
= BS_STOP
;
7385 goto cp0_unimplemented
;
7391 save_cpu_state(ctx
, 1);
7392 /* Mark as an IO operation because we may trigger a software
7394 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7397 gen_helper_mtc0_cause(cpu_env
, arg
);
7398 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7401 /* Stop translation as we may have triggered an intetrupt */
7402 ctx
->bstate
= BS_STOP
;
7406 goto cp0_unimplemented
;
7412 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7416 goto cp0_unimplemented
;
7426 check_insn(ctx
, ISA_MIPS32R2
);
7427 gen_helper_mtc0_ebase(cpu_env
, arg
);
7431 goto cp0_unimplemented
;
7437 gen_helper_mtc0_config0(cpu_env
, arg
);
7439 /* Stop translation as we may have switched the execution mode */
7440 ctx
->bstate
= BS_STOP
;
7443 /* ignored, read only */
7447 gen_helper_mtc0_config2(cpu_env
, arg
);
7449 /* Stop translation as we may have switched the execution mode */
7450 ctx
->bstate
= BS_STOP
;
7453 gen_helper_mtc0_config3(cpu_env
, arg
);
7455 /* Stop translation as we may have switched the execution mode */
7456 ctx
->bstate
= BS_STOP
;
7459 /* currently ignored */
7463 gen_helper_mtc0_config5(cpu_env
, arg
);
7465 /* Stop translation as we may have switched the execution mode */
7466 ctx
->bstate
= BS_STOP
;
7468 /* 6,7 are implementation dependent */
7470 rn
= "Invalid config selector";
7471 goto cp0_unimplemented
;
7477 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7481 CP0_CHECK(ctx
->mrp
);
7482 gen_helper_mtc0_maar(cpu_env
, arg
);
7486 CP0_CHECK(ctx
->mrp
);
7487 gen_helper_mtc0_maari(cpu_env
, arg
);
7491 goto cp0_unimplemented
;
7497 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7501 goto cp0_unimplemented
;
7507 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7511 goto cp0_unimplemented
;
7517 check_insn(ctx
, ISA_MIPS3
);
7518 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7522 goto cp0_unimplemented
;
7526 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7527 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7530 gen_helper_mtc0_framemask(cpu_env
, arg
);
7534 goto cp0_unimplemented
;
7539 rn
= "Diagnostic"; /* implementation dependent */
7544 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7545 /* BS_STOP isn't good enough here, hflags may have changed. */
7546 gen_save_pc(ctx
->pc
+ 4);
7547 ctx
->bstate
= BS_EXCP
;
7551 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7552 /* Stop translation as we may have switched the execution mode */
7553 ctx
->bstate
= BS_STOP
;
7554 rn
= "TraceControl";
7555 goto cp0_unimplemented
;
7557 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7558 /* Stop translation as we may have switched the execution mode */
7559 ctx
->bstate
= BS_STOP
;
7560 rn
= "TraceControl2";
7561 goto cp0_unimplemented
;
7563 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7564 /* Stop translation as we may have switched the execution mode */
7565 ctx
->bstate
= BS_STOP
;
7566 rn
= "UserTraceData";
7567 goto cp0_unimplemented
;
7569 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7570 /* Stop translation as we may have switched the execution mode */
7571 ctx
->bstate
= BS_STOP
;
7573 goto cp0_unimplemented
;
7575 goto cp0_unimplemented
;
7582 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7586 goto cp0_unimplemented
;
7592 gen_helper_mtc0_performance0(cpu_env
, arg
);
7593 rn
= "Performance0";
7596 // gen_helper_mtc0_performance1(cpu_env, arg);
7597 rn
= "Performance1";
7598 goto cp0_unimplemented
;
7600 // gen_helper_mtc0_performance2(cpu_env, arg);
7601 rn
= "Performance2";
7602 goto cp0_unimplemented
;
7604 // gen_helper_mtc0_performance3(cpu_env, arg);
7605 rn
= "Performance3";
7606 goto cp0_unimplemented
;
7608 // gen_helper_mtc0_performance4(cpu_env, arg);
7609 rn
= "Performance4";
7610 goto cp0_unimplemented
;
7612 // gen_helper_mtc0_performance5(cpu_env, arg);
7613 rn
= "Performance5";
7614 goto cp0_unimplemented
;
7616 // gen_helper_mtc0_performance6(cpu_env, arg);
7617 rn
= "Performance6";
7618 goto cp0_unimplemented
;
7620 // gen_helper_mtc0_performance7(cpu_env, arg);
7621 rn
= "Performance7";
7622 goto cp0_unimplemented
;
7624 goto cp0_unimplemented
;
7630 gen_helper_mtc0_errctl(cpu_env
, arg
);
7631 ctx
->bstate
= BS_STOP
;
7635 goto cp0_unimplemented
;
7645 goto cp0_unimplemented
;
7654 gen_helper_mtc0_taglo(cpu_env
, arg
);
7661 gen_helper_mtc0_datalo(cpu_env
, arg
);
7665 goto cp0_unimplemented
;
7674 gen_helper_mtc0_taghi(cpu_env
, arg
);
7681 gen_helper_mtc0_datahi(cpu_env
, arg
);
7686 goto cp0_unimplemented
;
7692 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7696 goto cp0_unimplemented
;
7703 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7707 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7708 tcg_gen_st_tl(arg
, cpu_env
,
7709 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7713 goto cp0_unimplemented
;
7717 goto cp0_unimplemented
;
7719 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
7721 /* For simplicity assume that all writes can cause interrupts. */
7722 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7724 ctx
->bstate
= BS_STOP
;
7729 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7731 #endif /* TARGET_MIPS64 */
7733 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7734 int u
, int sel
, int h
)
7736 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7737 TCGv t0
= tcg_temp_local_new();
7739 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7740 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7741 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7742 tcg_gen_movi_tl(t0
, -1);
7743 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7744 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7745 tcg_gen_movi_tl(t0
, -1);
7751 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7754 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7764 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7767 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7770 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7773 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7776 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7779 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7782 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7785 gen_mfc0(ctx
, t0
, rt
, sel
);
7792 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7795 gen_mfc0(ctx
, t0
, rt
, sel
);
7801 gen_helper_mftc0_status(t0
, cpu_env
);
7804 gen_mfc0(ctx
, t0
, rt
, sel
);
7810 gen_helper_mftc0_cause(t0
, cpu_env
);
7820 gen_helper_mftc0_epc(t0
, cpu_env
);
7830 gen_helper_mftc0_ebase(t0
, cpu_env
);
7840 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7850 gen_helper_mftc0_debug(t0
, cpu_env
);
7853 gen_mfc0(ctx
, t0
, rt
, sel
);
7858 gen_mfc0(ctx
, t0
, rt
, sel
);
7860 } else switch (sel
) {
7861 /* GPR registers. */
7863 gen_helper_1e0i(mftgpr
, t0
, rt
);
7865 /* Auxiliary CPU registers */
7869 gen_helper_1e0i(mftlo
, t0
, 0);
7872 gen_helper_1e0i(mfthi
, t0
, 0);
7875 gen_helper_1e0i(mftacx
, t0
, 0);
7878 gen_helper_1e0i(mftlo
, t0
, 1);
7881 gen_helper_1e0i(mfthi
, t0
, 1);
7884 gen_helper_1e0i(mftacx
, t0
, 1);
7887 gen_helper_1e0i(mftlo
, t0
, 2);
7890 gen_helper_1e0i(mfthi
, t0
, 2);
7893 gen_helper_1e0i(mftacx
, t0
, 2);
7896 gen_helper_1e0i(mftlo
, t0
, 3);
7899 gen_helper_1e0i(mfthi
, t0
, 3);
7902 gen_helper_1e0i(mftacx
, t0
, 3);
7905 gen_helper_mftdsp(t0
, cpu_env
);
7911 /* Floating point (COP1). */
7913 /* XXX: For now we support only a single FPU context. */
7915 TCGv_i32 fp0
= tcg_temp_new_i32();
7917 gen_load_fpr32(ctx
, fp0
, rt
);
7918 tcg_gen_ext_i32_tl(t0
, fp0
);
7919 tcg_temp_free_i32(fp0
);
7921 TCGv_i32 fp0
= tcg_temp_new_i32();
7923 gen_load_fpr32h(ctx
, fp0
, rt
);
7924 tcg_gen_ext_i32_tl(t0
, fp0
);
7925 tcg_temp_free_i32(fp0
);
7929 /* XXX: For now we support only a single FPU context. */
7930 gen_helper_1e0i(cfc1
, t0
, rt
);
7932 /* COP2: Not implemented. */
7939 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
7940 gen_store_gpr(t0
, rd
);
7946 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7947 generate_exception_end(ctx
, EXCP_RI
);
7950 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7951 int u
, int sel
, int h
)
7953 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7954 TCGv t0
= tcg_temp_local_new();
7956 gen_load_gpr(t0
, rt
);
7957 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7958 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7959 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7961 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7962 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7969 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7972 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7982 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7985 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7988 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7991 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7994 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7997 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8000 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8003 gen_mtc0(ctx
, t0
, rd
, sel
);
8010 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8013 gen_mtc0(ctx
, t0
, rd
, sel
);
8019 gen_helper_mttc0_status(cpu_env
, t0
);
8022 gen_mtc0(ctx
, t0
, rd
, sel
);
8028 gen_helper_mttc0_cause(cpu_env
, t0
);
8038 gen_helper_mttc0_ebase(cpu_env
, t0
);
8048 gen_helper_mttc0_debug(cpu_env
, t0
);
8051 gen_mtc0(ctx
, t0
, rd
, sel
);
8056 gen_mtc0(ctx
, t0
, rd
, sel
);
8058 } else switch (sel
) {
8059 /* GPR registers. */
8061 gen_helper_0e1i(mttgpr
, t0
, rd
);
8063 /* Auxiliary CPU registers */
8067 gen_helper_0e1i(mttlo
, t0
, 0);
8070 gen_helper_0e1i(mtthi
, t0
, 0);
8073 gen_helper_0e1i(mttacx
, t0
, 0);
8076 gen_helper_0e1i(mttlo
, t0
, 1);
8079 gen_helper_0e1i(mtthi
, t0
, 1);
8082 gen_helper_0e1i(mttacx
, t0
, 1);
8085 gen_helper_0e1i(mttlo
, t0
, 2);
8088 gen_helper_0e1i(mtthi
, t0
, 2);
8091 gen_helper_0e1i(mttacx
, t0
, 2);
8094 gen_helper_0e1i(mttlo
, t0
, 3);
8097 gen_helper_0e1i(mtthi
, t0
, 3);
8100 gen_helper_0e1i(mttacx
, t0
, 3);
8103 gen_helper_mttdsp(cpu_env
, t0
);
8109 /* Floating point (COP1). */
8111 /* XXX: For now we support only a single FPU context. */
8113 TCGv_i32 fp0
= tcg_temp_new_i32();
8115 tcg_gen_trunc_tl_i32(fp0
, t0
);
8116 gen_store_fpr32(ctx
, fp0
, rd
);
8117 tcg_temp_free_i32(fp0
);
8119 TCGv_i32 fp0
= tcg_temp_new_i32();
8121 tcg_gen_trunc_tl_i32(fp0
, t0
);
8122 gen_store_fpr32h(ctx
, fp0
, rd
);
8123 tcg_temp_free_i32(fp0
);
8127 /* XXX: For now we support only a single FPU context. */
8129 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
8131 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8132 tcg_temp_free_i32(fs_tmp
);
8134 /* Stop translation as we may have changed hflags */
8135 ctx
->bstate
= BS_STOP
;
8137 /* COP2: Not implemented. */
8144 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
8150 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8151 generate_exception_end(ctx
, EXCP_RI
);
8154 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
8156 const char *opn
= "ldst";
8158 check_cp0_enabled(ctx
);
8165 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8170 TCGv t0
= tcg_temp_new();
8172 gen_load_gpr(t0
, rt
);
8173 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8178 #if defined(TARGET_MIPS64)
8180 check_insn(ctx
, ISA_MIPS3
);
8185 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8189 check_insn(ctx
, ISA_MIPS3
);
8191 TCGv t0
= tcg_temp_new();
8193 gen_load_gpr(t0
, rt
);
8194 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8206 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8212 TCGv t0
= tcg_temp_new();
8213 gen_load_gpr(t0
, rt
);
8214 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8220 check_insn(ctx
, ASE_MT
);
8225 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8226 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8230 check_insn(ctx
, ASE_MT
);
8231 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8232 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8237 if (!env
->tlb
->helper_tlbwi
)
8239 gen_helper_tlbwi(cpu_env
);
8244 if (!env
->tlb
->helper_tlbinv
) {
8247 gen_helper_tlbinv(cpu_env
);
8248 } /* treat as nop if TLBINV not supported */
8253 if (!env
->tlb
->helper_tlbinvf
) {
8256 gen_helper_tlbinvf(cpu_env
);
8257 } /* treat as nop if TLBINV not supported */
8261 if (!env
->tlb
->helper_tlbwr
)
8263 gen_helper_tlbwr(cpu_env
);
8267 if (!env
->tlb
->helper_tlbp
)
8269 gen_helper_tlbp(cpu_env
);
8273 if (!env
->tlb
->helper_tlbr
)
8275 gen_helper_tlbr(cpu_env
);
8277 case OPC_ERET
: /* OPC_ERETNC */
8278 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8279 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8282 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8283 if (ctx
->opcode
& (1 << bit_shift
)) {
8286 check_insn(ctx
, ISA_MIPS32R5
);
8287 gen_helper_eretnc(cpu_env
);
8291 check_insn(ctx
, ISA_MIPS2
);
8292 gen_helper_eret(cpu_env
);
8294 ctx
->bstate
= BS_EXCP
;
8299 check_insn(ctx
, ISA_MIPS32
);
8300 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8301 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8304 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8306 generate_exception_end(ctx
, EXCP_RI
);
8308 gen_helper_deret(cpu_env
);
8309 ctx
->bstate
= BS_EXCP
;
8314 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8315 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8316 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8319 /* If we get an exception, we want to restart at next instruction */
8321 save_cpu_state(ctx
, 1);
8323 gen_helper_wait(cpu_env
);
8324 ctx
->bstate
= BS_EXCP
;
8329 generate_exception_end(ctx
, EXCP_RI
);
8332 (void)opn
; /* avoid a compiler warning */
8334 #endif /* !CONFIG_USER_ONLY */
8336 /* CP1 Branches (before delay slot) */
8337 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8338 int32_t cc
, int32_t offset
)
8340 target_ulong btarget
;
8341 TCGv_i32 t0
= tcg_temp_new_i32();
8343 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8344 generate_exception_end(ctx
, EXCP_RI
);
8349 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8351 btarget
= ctx
->pc
+ 4 + offset
;
8355 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8356 tcg_gen_not_i32(t0
, t0
);
8357 tcg_gen_andi_i32(t0
, t0
, 1);
8358 tcg_gen_extu_i32_tl(bcond
, t0
);
8361 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8362 tcg_gen_not_i32(t0
, t0
);
8363 tcg_gen_andi_i32(t0
, t0
, 1);
8364 tcg_gen_extu_i32_tl(bcond
, t0
);
8367 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8368 tcg_gen_andi_i32(t0
, t0
, 1);
8369 tcg_gen_extu_i32_tl(bcond
, t0
);
8372 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8373 tcg_gen_andi_i32(t0
, t0
, 1);
8374 tcg_gen_extu_i32_tl(bcond
, t0
);
8376 ctx
->hflags
|= MIPS_HFLAG_BL
;
8380 TCGv_i32 t1
= tcg_temp_new_i32();
8381 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8382 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8383 tcg_gen_nand_i32(t0
, t0
, t1
);
8384 tcg_temp_free_i32(t1
);
8385 tcg_gen_andi_i32(t0
, t0
, 1);
8386 tcg_gen_extu_i32_tl(bcond
, t0
);
8391 TCGv_i32 t1
= tcg_temp_new_i32();
8392 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8393 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8394 tcg_gen_or_i32(t0
, t0
, t1
);
8395 tcg_temp_free_i32(t1
);
8396 tcg_gen_andi_i32(t0
, t0
, 1);
8397 tcg_gen_extu_i32_tl(bcond
, t0
);
8402 TCGv_i32 t1
= tcg_temp_new_i32();
8403 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8404 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8405 tcg_gen_and_i32(t0
, t0
, t1
);
8406 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8407 tcg_gen_and_i32(t0
, t0
, t1
);
8408 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8409 tcg_gen_nand_i32(t0
, t0
, t1
);
8410 tcg_temp_free_i32(t1
);
8411 tcg_gen_andi_i32(t0
, t0
, 1);
8412 tcg_gen_extu_i32_tl(bcond
, t0
);
8417 TCGv_i32 t1
= tcg_temp_new_i32();
8418 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8419 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8420 tcg_gen_or_i32(t0
, t0
, t1
);
8421 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8422 tcg_gen_or_i32(t0
, t0
, t1
);
8423 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8424 tcg_gen_or_i32(t0
, t0
, t1
);
8425 tcg_temp_free_i32(t1
);
8426 tcg_gen_andi_i32(t0
, t0
, 1);
8427 tcg_gen_extu_i32_tl(bcond
, t0
);
8430 ctx
->hflags
|= MIPS_HFLAG_BC
;
8433 MIPS_INVAL("cp1 cond branch");
8434 generate_exception_end(ctx
, EXCP_RI
);
8437 ctx
->btarget
= btarget
;
8438 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8440 tcg_temp_free_i32(t0
);
8443 /* R6 CP1 Branches */
8444 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8445 int32_t ft
, int32_t offset
,
8448 target_ulong btarget
;
8449 TCGv_i64 t0
= tcg_temp_new_i64();
8451 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8452 #ifdef MIPS_DEBUG_DISAS
8453 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8456 generate_exception_end(ctx
, EXCP_RI
);
8460 gen_load_fpr64(ctx
, t0
, ft
);
8461 tcg_gen_andi_i64(t0
, t0
, 1);
8463 btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
8467 tcg_gen_xori_i64(t0
, t0
, 1);
8468 ctx
->hflags
|= MIPS_HFLAG_BC
;
8471 /* t0 already set */
8472 ctx
->hflags
|= MIPS_HFLAG_BC
;
8475 MIPS_INVAL("cp1 cond branch");
8476 generate_exception_end(ctx
, EXCP_RI
);
8480 tcg_gen_trunc_i64_tl(bcond
, t0
);
8482 ctx
->btarget
= btarget
;
8484 switch (delayslot_size
) {
8486 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8489 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8494 tcg_temp_free_i64(t0
);
8497 /* Coprocessor 1 (FPU) */
8499 #define FOP(func, fmt) (((fmt) << 21) | (func))
8502 OPC_ADD_S
= FOP(0, FMT_S
),
8503 OPC_SUB_S
= FOP(1, FMT_S
),
8504 OPC_MUL_S
= FOP(2, FMT_S
),
8505 OPC_DIV_S
= FOP(3, FMT_S
),
8506 OPC_SQRT_S
= FOP(4, FMT_S
),
8507 OPC_ABS_S
= FOP(5, FMT_S
),
8508 OPC_MOV_S
= FOP(6, FMT_S
),
8509 OPC_NEG_S
= FOP(7, FMT_S
),
8510 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8511 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8512 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8513 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8514 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8515 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8516 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8517 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8518 OPC_SEL_S
= FOP(16, FMT_S
),
8519 OPC_MOVCF_S
= FOP(17, FMT_S
),
8520 OPC_MOVZ_S
= FOP(18, FMT_S
),
8521 OPC_MOVN_S
= FOP(19, FMT_S
),
8522 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8523 OPC_RECIP_S
= FOP(21, FMT_S
),
8524 OPC_RSQRT_S
= FOP(22, FMT_S
),
8525 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8526 OPC_MADDF_S
= FOP(24, FMT_S
),
8527 OPC_MSUBF_S
= FOP(25, FMT_S
),
8528 OPC_RINT_S
= FOP(26, FMT_S
),
8529 OPC_CLASS_S
= FOP(27, FMT_S
),
8530 OPC_MIN_S
= FOP(28, FMT_S
),
8531 OPC_RECIP2_S
= FOP(28, FMT_S
),
8532 OPC_MINA_S
= FOP(29, FMT_S
),
8533 OPC_RECIP1_S
= FOP(29, FMT_S
),
8534 OPC_MAX_S
= FOP(30, FMT_S
),
8535 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8536 OPC_MAXA_S
= FOP(31, FMT_S
),
8537 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8538 OPC_CVT_D_S
= FOP(33, FMT_S
),
8539 OPC_CVT_W_S
= FOP(36, FMT_S
),
8540 OPC_CVT_L_S
= FOP(37, FMT_S
),
8541 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8542 OPC_CMP_F_S
= FOP (48, FMT_S
),
8543 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8544 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8545 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8546 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8547 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8548 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8549 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8550 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8551 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8552 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8553 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8554 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8555 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8556 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8557 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8559 OPC_ADD_D
= FOP(0, FMT_D
),
8560 OPC_SUB_D
= FOP(1, FMT_D
),
8561 OPC_MUL_D
= FOP(2, FMT_D
),
8562 OPC_DIV_D
= FOP(3, FMT_D
),
8563 OPC_SQRT_D
= FOP(4, FMT_D
),
8564 OPC_ABS_D
= FOP(5, FMT_D
),
8565 OPC_MOV_D
= FOP(6, FMT_D
),
8566 OPC_NEG_D
= FOP(7, FMT_D
),
8567 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8568 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8569 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8570 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8571 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8572 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8573 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8574 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8575 OPC_SEL_D
= FOP(16, FMT_D
),
8576 OPC_MOVCF_D
= FOP(17, FMT_D
),
8577 OPC_MOVZ_D
= FOP(18, FMT_D
),
8578 OPC_MOVN_D
= FOP(19, FMT_D
),
8579 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8580 OPC_RECIP_D
= FOP(21, FMT_D
),
8581 OPC_RSQRT_D
= FOP(22, FMT_D
),
8582 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8583 OPC_MADDF_D
= FOP(24, FMT_D
),
8584 OPC_MSUBF_D
= FOP(25, FMT_D
),
8585 OPC_RINT_D
= FOP(26, FMT_D
),
8586 OPC_CLASS_D
= FOP(27, FMT_D
),
8587 OPC_MIN_D
= FOP(28, FMT_D
),
8588 OPC_RECIP2_D
= FOP(28, FMT_D
),
8589 OPC_MINA_D
= FOP(29, FMT_D
),
8590 OPC_RECIP1_D
= FOP(29, FMT_D
),
8591 OPC_MAX_D
= FOP(30, FMT_D
),
8592 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8593 OPC_MAXA_D
= FOP(31, FMT_D
),
8594 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8595 OPC_CVT_S_D
= FOP(32, FMT_D
),
8596 OPC_CVT_W_D
= FOP(36, FMT_D
),
8597 OPC_CVT_L_D
= FOP(37, FMT_D
),
8598 OPC_CMP_F_D
= FOP (48, FMT_D
),
8599 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8600 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8601 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8602 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8603 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8604 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8605 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8606 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8607 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8608 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8609 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8610 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8611 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8612 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8613 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8615 OPC_CVT_S_W
= FOP(32, FMT_W
),
8616 OPC_CVT_D_W
= FOP(33, FMT_W
),
8617 OPC_CVT_S_L
= FOP(32, FMT_L
),
8618 OPC_CVT_D_L
= FOP(33, FMT_L
),
8619 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8621 OPC_ADD_PS
= FOP(0, FMT_PS
),
8622 OPC_SUB_PS
= FOP(1, FMT_PS
),
8623 OPC_MUL_PS
= FOP(2, FMT_PS
),
8624 OPC_DIV_PS
= FOP(3, FMT_PS
),
8625 OPC_ABS_PS
= FOP(5, FMT_PS
),
8626 OPC_MOV_PS
= FOP(6, FMT_PS
),
8627 OPC_NEG_PS
= FOP(7, FMT_PS
),
8628 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8629 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8630 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8631 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8632 OPC_MULR_PS
= FOP(26, FMT_PS
),
8633 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8634 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8635 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8636 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8638 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8639 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8640 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8641 OPC_PLL_PS
= FOP(44, FMT_PS
),
8642 OPC_PLU_PS
= FOP(45, FMT_PS
),
8643 OPC_PUL_PS
= FOP(46, FMT_PS
),
8644 OPC_PUU_PS
= FOP(47, FMT_PS
),
8645 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8646 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8647 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8648 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8649 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8650 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8651 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8652 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8653 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8654 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8655 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8656 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8657 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8658 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8659 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8660 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8664 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8665 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8666 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8667 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8668 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8669 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8670 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8671 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8672 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8673 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8674 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8675 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8676 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8677 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8678 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8679 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8680 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8681 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8682 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8683 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8684 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8685 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8687 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8688 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8689 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8690 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8691 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8692 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8693 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8694 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8695 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8696 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8697 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8698 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8699 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8700 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8701 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8702 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8703 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8704 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8705 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8706 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8707 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8708 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8710 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8712 TCGv t0
= tcg_temp_new();
8717 TCGv_i32 fp0
= tcg_temp_new_i32();
8719 gen_load_fpr32(ctx
, fp0
, fs
);
8720 tcg_gen_ext_i32_tl(t0
, fp0
);
8721 tcg_temp_free_i32(fp0
);
8723 gen_store_gpr(t0
, rt
);
8726 gen_load_gpr(t0
, rt
);
8728 TCGv_i32 fp0
= tcg_temp_new_i32();
8730 tcg_gen_trunc_tl_i32(fp0
, t0
);
8731 gen_store_fpr32(ctx
, fp0
, fs
);
8732 tcg_temp_free_i32(fp0
);
8736 gen_helper_1e0i(cfc1
, t0
, fs
);
8737 gen_store_gpr(t0
, rt
);
8740 gen_load_gpr(t0
, rt
);
8741 save_cpu_state(ctx
, 0);
8743 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8745 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8746 tcg_temp_free_i32(fs_tmp
);
8748 /* Stop translation as we may have changed hflags */
8749 ctx
->bstate
= BS_STOP
;
8751 #if defined(TARGET_MIPS64)
8753 gen_load_fpr64(ctx
, t0
, fs
);
8754 gen_store_gpr(t0
, rt
);
8757 gen_load_gpr(t0
, rt
);
8758 gen_store_fpr64(ctx
, t0
, fs
);
8763 TCGv_i32 fp0
= tcg_temp_new_i32();
8765 gen_load_fpr32h(ctx
, fp0
, fs
);
8766 tcg_gen_ext_i32_tl(t0
, fp0
);
8767 tcg_temp_free_i32(fp0
);
8769 gen_store_gpr(t0
, rt
);
8772 gen_load_gpr(t0
, rt
);
8774 TCGv_i32 fp0
= tcg_temp_new_i32();
8776 tcg_gen_trunc_tl_i32(fp0
, t0
);
8777 gen_store_fpr32h(ctx
, fp0
, fs
);
8778 tcg_temp_free_i32(fp0
);
8782 MIPS_INVAL("cp1 move");
8783 generate_exception_end(ctx
, EXCP_RI
);
8791 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8807 l1
= gen_new_label();
8808 t0
= tcg_temp_new_i32();
8809 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8810 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8811 tcg_temp_free_i32(t0
);
8813 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8815 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8820 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8824 TCGv_i32 t0
= tcg_temp_new_i32();
8825 TCGLabel
*l1
= gen_new_label();
8832 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8833 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8834 gen_load_fpr32(ctx
, t0
, fs
);
8835 gen_store_fpr32(ctx
, t0
, fd
);
8837 tcg_temp_free_i32(t0
);
8840 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8843 TCGv_i32 t0
= tcg_temp_new_i32();
8845 TCGLabel
*l1
= gen_new_label();
8852 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8853 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8854 tcg_temp_free_i32(t0
);
8855 fp0
= tcg_temp_new_i64();
8856 gen_load_fpr64(ctx
, fp0
, fs
);
8857 gen_store_fpr64(ctx
, fp0
, fd
);
8858 tcg_temp_free_i64(fp0
);
8862 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8866 TCGv_i32 t0
= tcg_temp_new_i32();
8867 TCGLabel
*l1
= gen_new_label();
8868 TCGLabel
*l2
= gen_new_label();
8875 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8876 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8877 gen_load_fpr32(ctx
, t0
, fs
);
8878 gen_store_fpr32(ctx
, t0
, fd
);
8881 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8882 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8883 gen_load_fpr32h(ctx
, t0
, fs
);
8884 gen_store_fpr32h(ctx
, t0
, fd
);
8885 tcg_temp_free_i32(t0
);
8889 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8892 TCGv_i32 t1
= tcg_const_i32(0);
8893 TCGv_i32 fp0
= tcg_temp_new_i32();
8894 TCGv_i32 fp1
= tcg_temp_new_i32();
8895 TCGv_i32 fp2
= tcg_temp_new_i32();
8896 gen_load_fpr32(ctx
, fp0
, fd
);
8897 gen_load_fpr32(ctx
, fp1
, ft
);
8898 gen_load_fpr32(ctx
, fp2
, fs
);
8902 tcg_gen_andi_i32(fp0
, fp0
, 1);
8903 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8906 tcg_gen_andi_i32(fp1
, fp1
, 1);
8907 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8910 tcg_gen_andi_i32(fp1
, fp1
, 1);
8911 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8914 MIPS_INVAL("gen_sel_s");
8915 generate_exception_end(ctx
, EXCP_RI
);
8919 gen_store_fpr32(ctx
, fp0
, fd
);
8920 tcg_temp_free_i32(fp2
);
8921 tcg_temp_free_i32(fp1
);
8922 tcg_temp_free_i32(fp0
);
8923 tcg_temp_free_i32(t1
);
8926 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8929 TCGv_i64 t1
= tcg_const_i64(0);
8930 TCGv_i64 fp0
= tcg_temp_new_i64();
8931 TCGv_i64 fp1
= tcg_temp_new_i64();
8932 TCGv_i64 fp2
= tcg_temp_new_i64();
8933 gen_load_fpr64(ctx
, fp0
, fd
);
8934 gen_load_fpr64(ctx
, fp1
, ft
);
8935 gen_load_fpr64(ctx
, fp2
, fs
);
8939 tcg_gen_andi_i64(fp0
, fp0
, 1);
8940 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8943 tcg_gen_andi_i64(fp1
, fp1
, 1);
8944 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8947 tcg_gen_andi_i64(fp1
, fp1
, 1);
8948 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8951 MIPS_INVAL("gen_sel_d");
8952 generate_exception_end(ctx
, EXCP_RI
);
8956 gen_store_fpr64(ctx
, fp0
, fd
);
8957 tcg_temp_free_i64(fp2
);
8958 tcg_temp_free_i64(fp1
);
8959 tcg_temp_free_i64(fp0
);
8960 tcg_temp_free_i64(t1
);
8963 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8964 int ft
, int fs
, int fd
, int cc
)
8966 uint32_t func
= ctx
->opcode
& 0x3f;
8970 TCGv_i32 fp0
= tcg_temp_new_i32();
8971 TCGv_i32 fp1
= tcg_temp_new_i32();
8973 gen_load_fpr32(ctx
, fp0
, fs
);
8974 gen_load_fpr32(ctx
, fp1
, ft
);
8975 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8976 tcg_temp_free_i32(fp1
);
8977 gen_store_fpr32(ctx
, fp0
, fd
);
8978 tcg_temp_free_i32(fp0
);
8983 TCGv_i32 fp0
= tcg_temp_new_i32();
8984 TCGv_i32 fp1
= tcg_temp_new_i32();
8986 gen_load_fpr32(ctx
, fp0
, fs
);
8987 gen_load_fpr32(ctx
, fp1
, ft
);
8988 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8989 tcg_temp_free_i32(fp1
);
8990 gen_store_fpr32(ctx
, fp0
, fd
);
8991 tcg_temp_free_i32(fp0
);
8996 TCGv_i32 fp0
= tcg_temp_new_i32();
8997 TCGv_i32 fp1
= tcg_temp_new_i32();
8999 gen_load_fpr32(ctx
, fp0
, fs
);
9000 gen_load_fpr32(ctx
, fp1
, ft
);
9001 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9002 tcg_temp_free_i32(fp1
);
9003 gen_store_fpr32(ctx
, fp0
, fd
);
9004 tcg_temp_free_i32(fp0
);
9009 TCGv_i32 fp0
= tcg_temp_new_i32();
9010 TCGv_i32 fp1
= tcg_temp_new_i32();
9012 gen_load_fpr32(ctx
, fp0
, fs
);
9013 gen_load_fpr32(ctx
, fp1
, ft
);
9014 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9015 tcg_temp_free_i32(fp1
);
9016 gen_store_fpr32(ctx
, fp0
, fd
);
9017 tcg_temp_free_i32(fp0
);
9022 TCGv_i32 fp0
= tcg_temp_new_i32();
9024 gen_load_fpr32(ctx
, fp0
, fs
);
9025 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9026 gen_store_fpr32(ctx
, fp0
, fd
);
9027 tcg_temp_free_i32(fp0
);
9032 TCGv_i32 fp0
= tcg_temp_new_i32();
9034 gen_load_fpr32(ctx
, fp0
, fs
);
9036 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9038 gen_helper_float_abs_s(fp0
, fp0
);
9040 gen_store_fpr32(ctx
, fp0
, fd
);
9041 tcg_temp_free_i32(fp0
);
9046 TCGv_i32 fp0
= tcg_temp_new_i32();
9048 gen_load_fpr32(ctx
, fp0
, fs
);
9049 gen_store_fpr32(ctx
, fp0
, fd
);
9050 tcg_temp_free_i32(fp0
);
9055 TCGv_i32 fp0
= tcg_temp_new_i32();
9057 gen_load_fpr32(ctx
, fp0
, fs
);
9059 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9061 gen_helper_float_chs_s(fp0
, fp0
);
9063 gen_store_fpr32(ctx
, fp0
, fd
);
9064 tcg_temp_free_i32(fp0
);
9068 check_cp1_64bitmode(ctx
);
9070 TCGv_i32 fp32
= tcg_temp_new_i32();
9071 TCGv_i64 fp64
= tcg_temp_new_i64();
9073 gen_load_fpr32(ctx
, fp32
, fs
);
9075 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
9077 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
9079 tcg_temp_free_i32(fp32
);
9080 gen_store_fpr64(ctx
, fp64
, fd
);
9081 tcg_temp_free_i64(fp64
);
9085 check_cp1_64bitmode(ctx
);
9087 TCGv_i32 fp32
= tcg_temp_new_i32();
9088 TCGv_i64 fp64
= tcg_temp_new_i64();
9090 gen_load_fpr32(ctx
, fp32
, fs
);
9092 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
9094 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
9096 tcg_temp_free_i32(fp32
);
9097 gen_store_fpr64(ctx
, fp64
, fd
);
9098 tcg_temp_free_i64(fp64
);
9102 check_cp1_64bitmode(ctx
);
9104 TCGv_i32 fp32
= tcg_temp_new_i32();
9105 TCGv_i64 fp64
= tcg_temp_new_i64();
9107 gen_load_fpr32(ctx
, fp32
, fs
);
9109 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
9111 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
9113 tcg_temp_free_i32(fp32
);
9114 gen_store_fpr64(ctx
, fp64
, fd
);
9115 tcg_temp_free_i64(fp64
);
9119 check_cp1_64bitmode(ctx
);
9121 TCGv_i32 fp32
= tcg_temp_new_i32();
9122 TCGv_i64 fp64
= tcg_temp_new_i64();
9124 gen_load_fpr32(ctx
, fp32
, fs
);
9126 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
9128 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9130 tcg_temp_free_i32(fp32
);
9131 gen_store_fpr64(ctx
, fp64
, fd
);
9132 tcg_temp_free_i64(fp64
);
9137 TCGv_i32 fp0
= tcg_temp_new_i32();
9139 gen_load_fpr32(ctx
, fp0
, fs
);
9141 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9143 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9145 gen_store_fpr32(ctx
, fp0
, fd
);
9146 tcg_temp_free_i32(fp0
);
9151 TCGv_i32 fp0
= tcg_temp_new_i32();
9153 gen_load_fpr32(ctx
, fp0
, fs
);
9155 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9157 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9159 gen_store_fpr32(ctx
, fp0
, fd
);
9160 tcg_temp_free_i32(fp0
);
9165 TCGv_i32 fp0
= tcg_temp_new_i32();
9167 gen_load_fpr32(ctx
, fp0
, fs
);
9169 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9171 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9173 gen_store_fpr32(ctx
, fp0
, fd
);
9174 tcg_temp_free_i32(fp0
);
9179 TCGv_i32 fp0
= tcg_temp_new_i32();
9181 gen_load_fpr32(ctx
, fp0
, fs
);
9183 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9185 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9187 gen_store_fpr32(ctx
, fp0
, fd
);
9188 tcg_temp_free_i32(fp0
);
9192 check_insn(ctx
, ISA_MIPS32R6
);
9193 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9196 check_insn(ctx
, ISA_MIPS32R6
);
9197 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9200 check_insn(ctx
, ISA_MIPS32R6
);
9201 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9204 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9205 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9208 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9210 TCGLabel
*l1
= gen_new_label();
9214 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9216 fp0
= tcg_temp_new_i32();
9217 gen_load_fpr32(ctx
, fp0
, fs
);
9218 gen_store_fpr32(ctx
, fp0
, fd
);
9219 tcg_temp_free_i32(fp0
);
9224 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9226 TCGLabel
*l1
= gen_new_label();
9230 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9231 fp0
= tcg_temp_new_i32();
9232 gen_load_fpr32(ctx
, fp0
, fs
);
9233 gen_store_fpr32(ctx
, fp0
, fd
);
9234 tcg_temp_free_i32(fp0
);
9241 TCGv_i32 fp0
= tcg_temp_new_i32();
9243 gen_load_fpr32(ctx
, fp0
, fs
);
9244 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9245 gen_store_fpr32(ctx
, fp0
, fd
);
9246 tcg_temp_free_i32(fp0
);
9251 TCGv_i32 fp0
= tcg_temp_new_i32();
9253 gen_load_fpr32(ctx
, fp0
, fs
);
9254 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9255 gen_store_fpr32(ctx
, fp0
, fd
);
9256 tcg_temp_free_i32(fp0
);
9260 check_insn(ctx
, ISA_MIPS32R6
);
9262 TCGv_i32 fp0
= tcg_temp_new_i32();
9263 TCGv_i32 fp1
= tcg_temp_new_i32();
9264 TCGv_i32 fp2
= tcg_temp_new_i32();
9265 gen_load_fpr32(ctx
, fp0
, fs
);
9266 gen_load_fpr32(ctx
, fp1
, ft
);
9267 gen_load_fpr32(ctx
, fp2
, fd
);
9268 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9269 gen_store_fpr32(ctx
, fp2
, fd
);
9270 tcg_temp_free_i32(fp2
);
9271 tcg_temp_free_i32(fp1
);
9272 tcg_temp_free_i32(fp0
);
9276 check_insn(ctx
, ISA_MIPS32R6
);
9278 TCGv_i32 fp0
= tcg_temp_new_i32();
9279 TCGv_i32 fp1
= tcg_temp_new_i32();
9280 TCGv_i32 fp2
= tcg_temp_new_i32();
9281 gen_load_fpr32(ctx
, fp0
, fs
);
9282 gen_load_fpr32(ctx
, fp1
, ft
);
9283 gen_load_fpr32(ctx
, fp2
, fd
);
9284 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9285 gen_store_fpr32(ctx
, fp2
, fd
);
9286 tcg_temp_free_i32(fp2
);
9287 tcg_temp_free_i32(fp1
);
9288 tcg_temp_free_i32(fp0
);
9292 check_insn(ctx
, ISA_MIPS32R6
);
9294 TCGv_i32 fp0
= tcg_temp_new_i32();
9295 gen_load_fpr32(ctx
, fp0
, fs
);
9296 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9297 gen_store_fpr32(ctx
, fp0
, fd
);
9298 tcg_temp_free_i32(fp0
);
9302 check_insn(ctx
, ISA_MIPS32R6
);
9304 TCGv_i32 fp0
= tcg_temp_new_i32();
9305 gen_load_fpr32(ctx
, fp0
, fs
);
9306 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9307 gen_store_fpr32(ctx
, fp0
, fd
);
9308 tcg_temp_free_i32(fp0
);
9311 case OPC_MIN_S
: /* OPC_RECIP2_S */
9312 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9314 TCGv_i32 fp0
= tcg_temp_new_i32();
9315 TCGv_i32 fp1
= tcg_temp_new_i32();
9316 TCGv_i32 fp2
= tcg_temp_new_i32();
9317 gen_load_fpr32(ctx
, fp0
, fs
);
9318 gen_load_fpr32(ctx
, fp1
, ft
);
9319 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9320 gen_store_fpr32(ctx
, fp2
, fd
);
9321 tcg_temp_free_i32(fp2
);
9322 tcg_temp_free_i32(fp1
);
9323 tcg_temp_free_i32(fp0
);
9326 check_cp1_64bitmode(ctx
);
9328 TCGv_i32 fp0
= tcg_temp_new_i32();
9329 TCGv_i32 fp1
= tcg_temp_new_i32();
9331 gen_load_fpr32(ctx
, fp0
, fs
);
9332 gen_load_fpr32(ctx
, fp1
, ft
);
9333 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9334 tcg_temp_free_i32(fp1
);
9335 gen_store_fpr32(ctx
, fp0
, fd
);
9336 tcg_temp_free_i32(fp0
);
9340 case OPC_MINA_S
: /* OPC_RECIP1_S */
9341 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9343 TCGv_i32 fp0
= tcg_temp_new_i32();
9344 TCGv_i32 fp1
= tcg_temp_new_i32();
9345 TCGv_i32 fp2
= tcg_temp_new_i32();
9346 gen_load_fpr32(ctx
, fp0
, fs
);
9347 gen_load_fpr32(ctx
, fp1
, ft
);
9348 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9349 gen_store_fpr32(ctx
, fp2
, fd
);
9350 tcg_temp_free_i32(fp2
);
9351 tcg_temp_free_i32(fp1
);
9352 tcg_temp_free_i32(fp0
);
9355 check_cp1_64bitmode(ctx
);
9357 TCGv_i32 fp0
= tcg_temp_new_i32();
9359 gen_load_fpr32(ctx
, fp0
, fs
);
9360 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9361 gen_store_fpr32(ctx
, fp0
, fd
);
9362 tcg_temp_free_i32(fp0
);
9366 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9367 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9369 TCGv_i32 fp0
= tcg_temp_new_i32();
9370 TCGv_i32 fp1
= tcg_temp_new_i32();
9371 gen_load_fpr32(ctx
, fp0
, fs
);
9372 gen_load_fpr32(ctx
, fp1
, ft
);
9373 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9374 gen_store_fpr32(ctx
, fp1
, fd
);
9375 tcg_temp_free_i32(fp1
);
9376 tcg_temp_free_i32(fp0
);
9379 check_cp1_64bitmode(ctx
);
9381 TCGv_i32 fp0
= tcg_temp_new_i32();
9383 gen_load_fpr32(ctx
, fp0
, fs
);
9384 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9385 gen_store_fpr32(ctx
, fp0
, fd
);
9386 tcg_temp_free_i32(fp0
);
9390 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9391 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9393 TCGv_i32 fp0
= tcg_temp_new_i32();
9394 TCGv_i32 fp1
= tcg_temp_new_i32();
9395 gen_load_fpr32(ctx
, fp0
, fs
);
9396 gen_load_fpr32(ctx
, fp1
, ft
);
9397 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9398 gen_store_fpr32(ctx
, fp1
, fd
);
9399 tcg_temp_free_i32(fp1
);
9400 tcg_temp_free_i32(fp0
);
9403 check_cp1_64bitmode(ctx
);
9405 TCGv_i32 fp0
= tcg_temp_new_i32();
9406 TCGv_i32 fp1
= tcg_temp_new_i32();
9408 gen_load_fpr32(ctx
, fp0
, fs
);
9409 gen_load_fpr32(ctx
, fp1
, ft
);
9410 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9411 tcg_temp_free_i32(fp1
);
9412 gen_store_fpr32(ctx
, fp0
, fd
);
9413 tcg_temp_free_i32(fp0
);
9418 check_cp1_registers(ctx
, fd
);
9420 TCGv_i32 fp32
= tcg_temp_new_i32();
9421 TCGv_i64 fp64
= tcg_temp_new_i64();
9423 gen_load_fpr32(ctx
, fp32
, fs
);
9424 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9425 tcg_temp_free_i32(fp32
);
9426 gen_store_fpr64(ctx
, fp64
, fd
);
9427 tcg_temp_free_i64(fp64
);
9432 TCGv_i32 fp0
= tcg_temp_new_i32();
9434 gen_load_fpr32(ctx
, fp0
, fs
);
9436 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9438 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9440 gen_store_fpr32(ctx
, fp0
, fd
);
9441 tcg_temp_free_i32(fp0
);
9445 check_cp1_64bitmode(ctx
);
9447 TCGv_i32 fp32
= tcg_temp_new_i32();
9448 TCGv_i64 fp64
= tcg_temp_new_i64();
9450 gen_load_fpr32(ctx
, fp32
, fs
);
9452 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9454 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9456 tcg_temp_free_i32(fp32
);
9457 gen_store_fpr64(ctx
, fp64
, fd
);
9458 tcg_temp_free_i64(fp64
);
9464 TCGv_i64 fp64
= tcg_temp_new_i64();
9465 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9466 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9468 gen_load_fpr32(ctx
, fp32_0
, fs
);
9469 gen_load_fpr32(ctx
, fp32_1
, ft
);
9470 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9471 tcg_temp_free_i32(fp32_1
);
9472 tcg_temp_free_i32(fp32_0
);
9473 gen_store_fpr64(ctx
, fp64
, fd
);
9474 tcg_temp_free_i64(fp64
);
9486 case OPC_CMP_NGLE_S
:
9493 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9494 if (ctx
->opcode
& (1 << 6)) {
9495 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9497 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9501 check_cp1_registers(ctx
, fs
| ft
| fd
);
9503 TCGv_i64 fp0
= tcg_temp_new_i64();
9504 TCGv_i64 fp1
= tcg_temp_new_i64();
9506 gen_load_fpr64(ctx
, fp0
, fs
);
9507 gen_load_fpr64(ctx
, fp1
, ft
);
9508 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9509 tcg_temp_free_i64(fp1
);
9510 gen_store_fpr64(ctx
, fp0
, fd
);
9511 tcg_temp_free_i64(fp0
);
9515 check_cp1_registers(ctx
, fs
| ft
| fd
);
9517 TCGv_i64 fp0
= tcg_temp_new_i64();
9518 TCGv_i64 fp1
= tcg_temp_new_i64();
9520 gen_load_fpr64(ctx
, fp0
, fs
);
9521 gen_load_fpr64(ctx
, fp1
, ft
);
9522 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9523 tcg_temp_free_i64(fp1
);
9524 gen_store_fpr64(ctx
, fp0
, fd
);
9525 tcg_temp_free_i64(fp0
);
9529 check_cp1_registers(ctx
, fs
| ft
| fd
);
9531 TCGv_i64 fp0
= tcg_temp_new_i64();
9532 TCGv_i64 fp1
= tcg_temp_new_i64();
9534 gen_load_fpr64(ctx
, fp0
, fs
);
9535 gen_load_fpr64(ctx
, fp1
, ft
);
9536 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9537 tcg_temp_free_i64(fp1
);
9538 gen_store_fpr64(ctx
, fp0
, fd
);
9539 tcg_temp_free_i64(fp0
);
9543 check_cp1_registers(ctx
, fs
| ft
| fd
);
9545 TCGv_i64 fp0
= tcg_temp_new_i64();
9546 TCGv_i64 fp1
= tcg_temp_new_i64();
9548 gen_load_fpr64(ctx
, fp0
, fs
);
9549 gen_load_fpr64(ctx
, fp1
, ft
);
9550 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9551 tcg_temp_free_i64(fp1
);
9552 gen_store_fpr64(ctx
, fp0
, fd
);
9553 tcg_temp_free_i64(fp0
);
9557 check_cp1_registers(ctx
, fs
| fd
);
9559 TCGv_i64 fp0
= tcg_temp_new_i64();
9561 gen_load_fpr64(ctx
, fp0
, fs
);
9562 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9563 gen_store_fpr64(ctx
, fp0
, fd
);
9564 tcg_temp_free_i64(fp0
);
9568 check_cp1_registers(ctx
, fs
| fd
);
9570 TCGv_i64 fp0
= tcg_temp_new_i64();
9572 gen_load_fpr64(ctx
, fp0
, fs
);
9574 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9576 gen_helper_float_abs_d(fp0
, fp0
);
9578 gen_store_fpr64(ctx
, fp0
, fd
);
9579 tcg_temp_free_i64(fp0
);
9583 check_cp1_registers(ctx
, fs
| fd
);
9585 TCGv_i64 fp0
= tcg_temp_new_i64();
9587 gen_load_fpr64(ctx
, fp0
, fs
);
9588 gen_store_fpr64(ctx
, fp0
, fd
);
9589 tcg_temp_free_i64(fp0
);
9593 check_cp1_registers(ctx
, fs
| fd
);
9595 TCGv_i64 fp0
= tcg_temp_new_i64();
9597 gen_load_fpr64(ctx
, fp0
, fs
);
9599 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9601 gen_helper_float_chs_d(fp0
, fp0
);
9603 gen_store_fpr64(ctx
, fp0
, fd
);
9604 tcg_temp_free_i64(fp0
);
9608 check_cp1_64bitmode(ctx
);
9610 TCGv_i64 fp0
= tcg_temp_new_i64();
9612 gen_load_fpr64(ctx
, fp0
, fs
);
9614 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9616 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9618 gen_store_fpr64(ctx
, fp0
, fd
);
9619 tcg_temp_free_i64(fp0
);
9623 check_cp1_64bitmode(ctx
);
9625 TCGv_i64 fp0
= tcg_temp_new_i64();
9627 gen_load_fpr64(ctx
, fp0
, fs
);
9629 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9631 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9633 gen_store_fpr64(ctx
, fp0
, fd
);
9634 tcg_temp_free_i64(fp0
);
9638 check_cp1_64bitmode(ctx
);
9640 TCGv_i64 fp0
= tcg_temp_new_i64();
9642 gen_load_fpr64(ctx
, fp0
, fs
);
9644 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9646 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9648 gen_store_fpr64(ctx
, fp0
, fd
);
9649 tcg_temp_free_i64(fp0
);
9653 check_cp1_64bitmode(ctx
);
9655 TCGv_i64 fp0
= tcg_temp_new_i64();
9657 gen_load_fpr64(ctx
, fp0
, fs
);
9659 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9661 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9663 gen_store_fpr64(ctx
, fp0
, fd
);
9664 tcg_temp_free_i64(fp0
);
9668 check_cp1_registers(ctx
, fs
);
9670 TCGv_i32 fp32
= tcg_temp_new_i32();
9671 TCGv_i64 fp64
= tcg_temp_new_i64();
9673 gen_load_fpr64(ctx
, fp64
, fs
);
9675 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9677 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9679 tcg_temp_free_i64(fp64
);
9680 gen_store_fpr32(ctx
, fp32
, fd
);
9681 tcg_temp_free_i32(fp32
);
9685 check_cp1_registers(ctx
, fs
);
9687 TCGv_i32 fp32
= tcg_temp_new_i32();
9688 TCGv_i64 fp64
= tcg_temp_new_i64();
9690 gen_load_fpr64(ctx
, fp64
, fs
);
9692 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9694 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9696 tcg_temp_free_i64(fp64
);
9697 gen_store_fpr32(ctx
, fp32
, fd
);
9698 tcg_temp_free_i32(fp32
);
9702 check_cp1_registers(ctx
, fs
);
9704 TCGv_i32 fp32
= tcg_temp_new_i32();
9705 TCGv_i64 fp64
= tcg_temp_new_i64();
9707 gen_load_fpr64(ctx
, fp64
, fs
);
9709 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9711 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9713 tcg_temp_free_i64(fp64
);
9714 gen_store_fpr32(ctx
, fp32
, fd
);
9715 tcg_temp_free_i32(fp32
);
9719 check_cp1_registers(ctx
, fs
);
9721 TCGv_i32 fp32
= tcg_temp_new_i32();
9722 TCGv_i64 fp64
= tcg_temp_new_i64();
9724 gen_load_fpr64(ctx
, fp64
, fs
);
9726 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9728 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9730 tcg_temp_free_i64(fp64
);
9731 gen_store_fpr32(ctx
, fp32
, fd
);
9732 tcg_temp_free_i32(fp32
);
9736 check_insn(ctx
, ISA_MIPS32R6
);
9737 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9740 check_insn(ctx
, ISA_MIPS32R6
);
9741 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9744 check_insn(ctx
, ISA_MIPS32R6
);
9745 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9748 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9749 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9752 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9754 TCGLabel
*l1
= gen_new_label();
9758 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9760 fp0
= tcg_temp_new_i64();
9761 gen_load_fpr64(ctx
, fp0
, fs
);
9762 gen_store_fpr64(ctx
, fp0
, fd
);
9763 tcg_temp_free_i64(fp0
);
9768 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9770 TCGLabel
*l1
= gen_new_label();
9774 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9775 fp0
= tcg_temp_new_i64();
9776 gen_load_fpr64(ctx
, fp0
, fs
);
9777 gen_store_fpr64(ctx
, fp0
, fd
);
9778 tcg_temp_free_i64(fp0
);
9784 check_cp1_registers(ctx
, fs
| fd
);
9786 TCGv_i64 fp0
= tcg_temp_new_i64();
9788 gen_load_fpr64(ctx
, fp0
, fs
);
9789 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9790 gen_store_fpr64(ctx
, fp0
, fd
);
9791 tcg_temp_free_i64(fp0
);
9795 check_cp1_registers(ctx
, fs
| fd
);
9797 TCGv_i64 fp0
= tcg_temp_new_i64();
9799 gen_load_fpr64(ctx
, fp0
, fs
);
9800 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9801 gen_store_fpr64(ctx
, fp0
, fd
);
9802 tcg_temp_free_i64(fp0
);
9806 check_insn(ctx
, ISA_MIPS32R6
);
9808 TCGv_i64 fp0
= tcg_temp_new_i64();
9809 TCGv_i64 fp1
= tcg_temp_new_i64();
9810 TCGv_i64 fp2
= tcg_temp_new_i64();
9811 gen_load_fpr64(ctx
, fp0
, fs
);
9812 gen_load_fpr64(ctx
, fp1
, ft
);
9813 gen_load_fpr64(ctx
, fp2
, fd
);
9814 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9815 gen_store_fpr64(ctx
, fp2
, fd
);
9816 tcg_temp_free_i64(fp2
);
9817 tcg_temp_free_i64(fp1
);
9818 tcg_temp_free_i64(fp0
);
9822 check_insn(ctx
, ISA_MIPS32R6
);
9824 TCGv_i64 fp0
= tcg_temp_new_i64();
9825 TCGv_i64 fp1
= tcg_temp_new_i64();
9826 TCGv_i64 fp2
= tcg_temp_new_i64();
9827 gen_load_fpr64(ctx
, fp0
, fs
);
9828 gen_load_fpr64(ctx
, fp1
, ft
);
9829 gen_load_fpr64(ctx
, fp2
, fd
);
9830 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9831 gen_store_fpr64(ctx
, fp2
, fd
);
9832 tcg_temp_free_i64(fp2
);
9833 tcg_temp_free_i64(fp1
);
9834 tcg_temp_free_i64(fp0
);
9838 check_insn(ctx
, ISA_MIPS32R6
);
9840 TCGv_i64 fp0
= tcg_temp_new_i64();
9841 gen_load_fpr64(ctx
, fp0
, fs
);
9842 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9843 gen_store_fpr64(ctx
, fp0
, fd
);
9844 tcg_temp_free_i64(fp0
);
9848 check_insn(ctx
, ISA_MIPS32R6
);
9850 TCGv_i64 fp0
= tcg_temp_new_i64();
9851 gen_load_fpr64(ctx
, fp0
, fs
);
9852 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9853 gen_store_fpr64(ctx
, fp0
, fd
);
9854 tcg_temp_free_i64(fp0
);
9857 case OPC_MIN_D
: /* OPC_RECIP2_D */
9858 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9860 TCGv_i64 fp0
= tcg_temp_new_i64();
9861 TCGv_i64 fp1
= tcg_temp_new_i64();
9862 gen_load_fpr64(ctx
, fp0
, fs
);
9863 gen_load_fpr64(ctx
, fp1
, ft
);
9864 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9865 gen_store_fpr64(ctx
, fp1
, fd
);
9866 tcg_temp_free_i64(fp1
);
9867 tcg_temp_free_i64(fp0
);
9870 check_cp1_64bitmode(ctx
);
9872 TCGv_i64 fp0
= tcg_temp_new_i64();
9873 TCGv_i64 fp1
= tcg_temp_new_i64();
9875 gen_load_fpr64(ctx
, fp0
, fs
);
9876 gen_load_fpr64(ctx
, fp1
, ft
);
9877 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9878 tcg_temp_free_i64(fp1
);
9879 gen_store_fpr64(ctx
, fp0
, fd
);
9880 tcg_temp_free_i64(fp0
);
9884 case OPC_MINA_D
: /* OPC_RECIP1_D */
9885 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9887 TCGv_i64 fp0
= tcg_temp_new_i64();
9888 TCGv_i64 fp1
= tcg_temp_new_i64();
9889 gen_load_fpr64(ctx
, fp0
, fs
);
9890 gen_load_fpr64(ctx
, fp1
, ft
);
9891 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9892 gen_store_fpr64(ctx
, fp1
, fd
);
9893 tcg_temp_free_i64(fp1
);
9894 tcg_temp_free_i64(fp0
);
9897 check_cp1_64bitmode(ctx
);
9899 TCGv_i64 fp0
= tcg_temp_new_i64();
9901 gen_load_fpr64(ctx
, fp0
, fs
);
9902 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9903 gen_store_fpr64(ctx
, fp0
, fd
);
9904 tcg_temp_free_i64(fp0
);
9908 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9909 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9911 TCGv_i64 fp0
= tcg_temp_new_i64();
9912 TCGv_i64 fp1
= tcg_temp_new_i64();
9913 gen_load_fpr64(ctx
, fp0
, fs
);
9914 gen_load_fpr64(ctx
, fp1
, ft
);
9915 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9916 gen_store_fpr64(ctx
, fp1
, fd
);
9917 tcg_temp_free_i64(fp1
);
9918 tcg_temp_free_i64(fp0
);
9921 check_cp1_64bitmode(ctx
);
9923 TCGv_i64 fp0
= tcg_temp_new_i64();
9925 gen_load_fpr64(ctx
, fp0
, fs
);
9926 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9927 gen_store_fpr64(ctx
, fp0
, fd
);
9928 tcg_temp_free_i64(fp0
);
9932 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9933 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9935 TCGv_i64 fp0
= tcg_temp_new_i64();
9936 TCGv_i64 fp1
= tcg_temp_new_i64();
9937 gen_load_fpr64(ctx
, fp0
, fs
);
9938 gen_load_fpr64(ctx
, fp1
, ft
);
9939 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9940 gen_store_fpr64(ctx
, fp1
, fd
);
9941 tcg_temp_free_i64(fp1
);
9942 tcg_temp_free_i64(fp0
);
9945 check_cp1_64bitmode(ctx
);
9947 TCGv_i64 fp0
= tcg_temp_new_i64();
9948 TCGv_i64 fp1
= tcg_temp_new_i64();
9950 gen_load_fpr64(ctx
, fp0
, fs
);
9951 gen_load_fpr64(ctx
, fp1
, ft
);
9952 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9953 tcg_temp_free_i64(fp1
);
9954 gen_store_fpr64(ctx
, fp0
, fd
);
9955 tcg_temp_free_i64(fp0
);
9968 case OPC_CMP_NGLE_D
:
9975 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9976 if (ctx
->opcode
& (1 << 6)) {
9977 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9979 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9983 check_cp1_registers(ctx
, fs
);
9985 TCGv_i32 fp32
= tcg_temp_new_i32();
9986 TCGv_i64 fp64
= tcg_temp_new_i64();
9988 gen_load_fpr64(ctx
, fp64
, fs
);
9989 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9990 tcg_temp_free_i64(fp64
);
9991 gen_store_fpr32(ctx
, fp32
, fd
);
9992 tcg_temp_free_i32(fp32
);
9996 check_cp1_registers(ctx
, fs
);
9998 TCGv_i32 fp32
= tcg_temp_new_i32();
9999 TCGv_i64 fp64
= tcg_temp_new_i64();
10001 gen_load_fpr64(ctx
, fp64
, fs
);
10002 if (ctx
->nan2008
) {
10003 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10005 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10007 tcg_temp_free_i64(fp64
);
10008 gen_store_fpr32(ctx
, fp32
, fd
);
10009 tcg_temp_free_i32(fp32
);
10013 check_cp1_64bitmode(ctx
);
10015 TCGv_i64 fp0
= tcg_temp_new_i64();
10017 gen_load_fpr64(ctx
, fp0
, fs
);
10018 if (ctx
->nan2008
) {
10019 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10021 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10023 gen_store_fpr64(ctx
, fp0
, fd
);
10024 tcg_temp_free_i64(fp0
);
10029 TCGv_i32 fp0
= tcg_temp_new_i32();
10031 gen_load_fpr32(ctx
, fp0
, fs
);
10032 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10033 gen_store_fpr32(ctx
, fp0
, fd
);
10034 tcg_temp_free_i32(fp0
);
10038 check_cp1_registers(ctx
, fd
);
10040 TCGv_i32 fp32
= tcg_temp_new_i32();
10041 TCGv_i64 fp64
= tcg_temp_new_i64();
10043 gen_load_fpr32(ctx
, fp32
, fs
);
10044 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10045 tcg_temp_free_i32(fp32
);
10046 gen_store_fpr64(ctx
, fp64
, fd
);
10047 tcg_temp_free_i64(fp64
);
10051 check_cp1_64bitmode(ctx
);
10053 TCGv_i32 fp32
= tcg_temp_new_i32();
10054 TCGv_i64 fp64
= tcg_temp_new_i64();
10056 gen_load_fpr64(ctx
, fp64
, fs
);
10057 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10058 tcg_temp_free_i64(fp64
);
10059 gen_store_fpr32(ctx
, fp32
, fd
);
10060 tcg_temp_free_i32(fp32
);
10064 check_cp1_64bitmode(ctx
);
10066 TCGv_i64 fp0
= tcg_temp_new_i64();
10068 gen_load_fpr64(ctx
, fp0
, fs
);
10069 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
10070 gen_store_fpr64(ctx
, fp0
, fd
);
10071 tcg_temp_free_i64(fp0
);
10074 case OPC_CVT_PS_PW
:
10077 TCGv_i64 fp0
= tcg_temp_new_i64();
10079 gen_load_fpr64(ctx
, fp0
, fs
);
10080 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
10081 gen_store_fpr64(ctx
, fp0
, fd
);
10082 tcg_temp_free_i64(fp0
);
10088 TCGv_i64 fp0
= tcg_temp_new_i64();
10089 TCGv_i64 fp1
= tcg_temp_new_i64();
10091 gen_load_fpr64(ctx
, fp0
, fs
);
10092 gen_load_fpr64(ctx
, fp1
, ft
);
10093 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
10094 tcg_temp_free_i64(fp1
);
10095 gen_store_fpr64(ctx
, fp0
, fd
);
10096 tcg_temp_free_i64(fp0
);
10102 TCGv_i64 fp0
= tcg_temp_new_i64();
10103 TCGv_i64 fp1
= tcg_temp_new_i64();
10105 gen_load_fpr64(ctx
, fp0
, fs
);
10106 gen_load_fpr64(ctx
, fp1
, ft
);
10107 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
10108 tcg_temp_free_i64(fp1
);
10109 gen_store_fpr64(ctx
, fp0
, fd
);
10110 tcg_temp_free_i64(fp0
);
10116 TCGv_i64 fp0
= tcg_temp_new_i64();
10117 TCGv_i64 fp1
= tcg_temp_new_i64();
10119 gen_load_fpr64(ctx
, fp0
, fs
);
10120 gen_load_fpr64(ctx
, fp1
, ft
);
10121 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
10122 tcg_temp_free_i64(fp1
);
10123 gen_store_fpr64(ctx
, fp0
, fd
);
10124 tcg_temp_free_i64(fp0
);
10130 TCGv_i64 fp0
= tcg_temp_new_i64();
10132 gen_load_fpr64(ctx
, fp0
, fs
);
10133 gen_helper_float_abs_ps(fp0
, fp0
);
10134 gen_store_fpr64(ctx
, fp0
, fd
);
10135 tcg_temp_free_i64(fp0
);
10141 TCGv_i64 fp0
= tcg_temp_new_i64();
10143 gen_load_fpr64(ctx
, fp0
, fs
);
10144 gen_store_fpr64(ctx
, fp0
, fd
);
10145 tcg_temp_free_i64(fp0
);
10151 TCGv_i64 fp0
= tcg_temp_new_i64();
10153 gen_load_fpr64(ctx
, fp0
, fs
);
10154 gen_helper_float_chs_ps(fp0
, fp0
);
10155 gen_store_fpr64(ctx
, fp0
, fd
);
10156 tcg_temp_free_i64(fp0
);
10161 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10166 TCGLabel
*l1
= gen_new_label();
10170 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10171 fp0
= tcg_temp_new_i64();
10172 gen_load_fpr64(ctx
, fp0
, fs
);
10173 gen_store_fpr64(ctx
, fp0
, fd
);
10174 tcg_temp_free_i64(fp0
);
10181 TCGLabel
*l1
= gen_new_label();
10185 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10186 fp0
= tcg_temp_new_i64();
10187 gen_load_fpr64(ctx
, fp0
, fs
);
10188 gen_store_fpr64(ctx
, fp0
, fd
);
10189 tcg_temp_free_i64(fp0
);
10197 TCGv_i64 fp0
= tcg_temp_new_i64();
10198 TCGv_i64 fp1
= tcg_temp_new_i64();
10200 gen_load_fpr64(ctx
, fp0
, ft
);
10201 gen_load_fpr64(ctx
, fp1
, fs
);
10202 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10203 tcg_temp_free_i64(fp1
);
10204 gen_store_fpr64(ctx
, fp0
, fd
);
10205 tcg_temp_free_i64(fp0
);
10211 TCGv_i64 fp0
= tcg_temp_new_i64();
10212 TCGv_i64 fp1
= tcg_temp_new_i64();
10214 gen_load_fpr64(ctx
, fp0
, ft
);
10215 gen_load_fpr64(ctx
, fp1
, fs
);
10216 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10217 tcg_temp_free_i64(fp1
);
10218 gen_store_fpr64(ctx
, fp0
, fd
);
10219 tcg_temp_free_i64(fp0
);
10222 case OPC_RECIP2_PS
:
10225 TCGv_i64 fp0
= tcg_temp_new_i64();
10226 TCGv_i64 fp1
= tcg_temp_new_i64();
10228 gen_load_fpr64(ctx
, fp0
, fs
);
10229 gen_load_fpr64(ctx
, fp1
, ft
);
10230 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10231 tcg_temp_free_i64(fp1
);
10232 gen_store_fpr64(ctx
, fp0
, fd
);
10233 tcg_temp_free_i64(fp0
);
10236 case OPC_RECIP1_PS
:
10239 TCGv_i64 fp0
= tcg_temp_new_i64();
10241 gen_load_fpr64(ctx
, fp0
, fs
);
10242 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10243 gen_store_fpr64(ctx
, fp0
, fd
);
10244 tcg_temp_free_i64(fp0
);
10247 case OPC_RSQRT1_PS
:
10250 TCGv_i64 fp0
= tcg_temp_new_i64();
10252 gen_load_fpr64(ctx
, fp0
, fs
);
10253 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10254 gen_store_fpr64(ctx
, fp0
, fd
);
10255 tcg_temp_free_i64(fp0
);
10258 case OPC_RSQRT2_PS
:
10261 TCGv_i64 fp0
= tcg_temp_new_i64();
10262 TCGv_i64 fp1
= tcg_temp_new_i64();
10264 gen_load_fpr64(ctx
, fp0
, fs
);
10265 gen_load_fpr64(ctx
, fp1
, ft
);
10266 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10267 tcg_temp_free_i64(fp1
);
10268 gen_store_fpr64(ctx
, fp0
, fd
);
10269 tcg_temp_free_i64(fp0
);
10273 check_cp1_64bitmode(ctx
);
10275 TCGv_i32 fp0
= tcg_temp_new_i32();
10277 gen_load_fpr32h(ctx
, fp0
, fs
);
10278 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10279 gen_store_fpr32(ctx
, fp0
, fd
);
10280 tcg_temp_free_i32(fp0
);
10283 case OPC_CVT_PW_PS
:
10286 TCGv_i64 fp0
= tcg_temp_new_i64();
10288 gen_load_fpr64(ctx
, fp0
, fs
);
10289 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10290 gen_store_fpr64(ctx
, fp0
, fd
);
10291 tcg_temp_free_i64(fp0
);
10295 check_cp1_64bitmode(ctx
);
10297 TCGv_i32 fp0
= tcg_temp_new_i32();
10299 gen_load_fpr32(ctx
, fp0
, fs
);
10300 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10301 gen_store_fpr32(ctx
, fp0
, fd
);
10302 tcg_temp_free_i32(fp0
);
10308 TCGv_i32 fp0
= tcg_temp_new_i32();
10309 TCGv_i32 fp1
= tcg_temp_new_i32();
10311 gen_load_fpr32(ctx
, fp0
, fs
);
10312 gen_load_fpr32(ctx
, fp1
, ft
);
10313 gen_store_fpr32h(ctx
, fp0
, fd
);
10314 gen_store_fpr32(ctx
, fp1
, fd
);
10315 tcg_temp_free_i32(fp0
);
10316 tcg_temp_free_i32(fp1
);
10322 TCGv_i32 fp0
= tcg_temp_new_i32();
10323 TCGv_i32 fp1
= tcg_temp_new_i32();
10325 gen_load_fpr32(ctx
, fp0
, fs
);
10326 gen_load_fpr32h(ctx
, fp1
, ft
);
10327 gen_store_fpr32(ctx
, fp1
, fd
);
10328 gen_store_fpr32h(ctx
, fp0
, fd
);
10329 tcg_temp_free_i32(fp0
);
10330 tcg_temp_free_i32(fp1
);
10336 TCGv_i32 fp0
= tcg_temp_new_i32();
10337 TCGv_i32 fp1
= tcg_temp_new_i32();
10339 gen_load_fpr32h(ctx
, fp0
, fs
);
10340 gen_load_fpr32(ctx
, fp1
, ft
);
10341 gen_store_fpr32(ctx
, fp1
, fd
);
10342 gen_store_fpr32h(ctx
, fp0
, fd
);
10343 tcg_temp_free_i32(fp0
);
10344 tcg_temp_free_i32(fp1
);
10350 TCGv_i32 fp0
= tcg_temp_new_i32();
10351 TCGv_i32 fp1
= tcg_temp_new_i32();
10353 gen_load_fpr32h(ctx
, fp0
, fs
);
10354 gen_load_fpr32h(ctx
, fp1
, ft
);
10355 gen_store_fpr32(ctx
, fp1
, fd
);
10356 gen_store_fpr32h(ctx
, fp0
, fd
);
10357 tcg_temp_free_i32(fp0
);
10358 tcg_temp_free_i32(fp1
);
10362 case OPC_CMP_UN_PS
:
10363 case OPC_CMP_EQ_PS
:
10364 case OPC_CMP_UEQ_PS
:
10365 case OPC_CMP_OLT_PS
:
10366 case OPC_CMP_ULT_PS
:
10367 case OPC_CMP_OLE_PS
:
10368 case OPC_CMP_ULE_PS
:
10369 case OPC_CMP_SF_PS
:
10370 case OPC_CMP_NGLE_PS
:
10371 case OPC_CMP_SEQ_PS
:
10372 case OPC_CMP_NGL_PS
:
10373 case OPC_CMP_LT_PS
:
10374 case OPC_CMP_NGE_PS
:
10375 case OPC_CMP_LE_PS
:
10376 case OPC_CMP_NGT_PS
:
10377 if (ctx
->opcode
& (1 << 6)) {
10378 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10380 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10384 MIPS_INVAL("farith");
10385 generate_exception_end(ctx
, EXCP_RI
);
10390 /* Coprocessor 3 (FPU) */
10391 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10392 int fd
, int fs
, int base
, int index
)
10394 TCGv t0
= tcg_temp_new();
10397 gen_load_gpr(t0
, index
);
10398 } else if (index
== 0) {
10399 gen_load_gpr(t0
, base
);
10401 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10403 /* Don't do NOP if destination is zero: we must perform the actual
10409 TCGv_i32 fp0
= tcg_temp_new_i32();
10411 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10412 tcg_gen_trunc_tl_i32(fp0
, t0
);
10413 gen_store_fpr32(ctx
, fp0
, fd
);
10414 tcg_temp_free_i32(fp0
);
10419 check_cp1_registers(ctx
, fd
);
10421 TCGv_i64 fp0
= tcg_temp_new_i64();
10422 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10423 gen_store_fpr64(ctx
, fp0
, fd
);
10424 tcg_temp_free_i64(fp0
);
10428 check_cp1_64bitmode(ctx
);
10429 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10431 TCGv_i64 fp0
= tcg_temp_new_i64();
10433 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10434 gen_store_fpr64(ctx
, fp0
, fd
);
10435 tcg_temp_free_i64(fp0
);
10441 TCGv_i32 fp0
= tcg_temp_new_i32();
10442 gen_load_fpr32(ctx
, fp0
, fs
);
10443 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10444 tcg_temp_free_i32(fp0
);
10449 check_cp1_registers(ctx
, fs
);
10451 TCGv_i64 fp0
= tcg_temp_new_i64();
10452 gen_load_fpr64(ctx
, fp0
, fs
);
10453 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10454 tcg_temp_free_i64(fp0
);
10458 check_cp1_64bitmode(ctx
);
10459 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10461 TCGv_i64 fp0
= tcg_temp_new_i64();
10462 gen_load_fpr64(ctx
, fp0
, fs
);
10463 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10464 tcg_temp_free_i64(fp0
);
10471 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10472 int fd
, int fr
, int fs
, int ft
)
10478 TCGv t0
= tcg_temp_local_new();
10479 TCGv_i32 fp
= tcg_temp_new_i32();
10480 TCGv_i32 fph
= tcg_temp_new_i32();
10481 TCGLabel
*l1
= gen_new_label();
10482 TCGLabel
*l2
= gen_new_label();
10484 gen_load_gpr(t0
, fr
);
10485 tcg_gen_andi_tl(t0
, t0
, 0x7);
10487 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10488 gen_load_fpr32(ctx
, fp
, fs
);
10489 gen_load_fpr32h(ctx
, fph
, fs
);
10490 gen_store_fpr32(ctx
, fp
, fd
);
10491 gen_store_fpr32h(ctx
, fph
, fd
);
10494 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10496 #ifdef TARGET_WORDS_BIGENDIAN
10497 gen_load_fpr32(ctx
, fp
, fs
);
10498 gen_load_fpr32h(ctx
, fph
, ft
);
10499 gen_store_fpr32h(ctx
, fp
, fd
);
10500 gen_store_fpr32(ctx
, fph
, fd
);
10502 gen_load_fpr32h(ctx
, fph
, fs
);
10503 gen_load_fpr32(ctx
, fp
, ft
);
10504 gen_store_fpr32(ctx
, fph
, fd
);
10505 gen_store_fpr32h(ctx
, fp
, fd
);
10508 tcg_temp_free_i32(fp
);
10509 tcg_temp_free_i32(fph
);
10515 TCGv_i32 fp0
= tcg_temp_new_i32();
10516 TCGv_i32 fp1
= tcg_temp_new_i32();
10517 TCGv_i32 fp2
= tcg_temp_new_i32();
10519 gen_load_fpr32(ctx
, fp0
, fs
);
10520 gen_load_fpr32(ctx
, fp1
, ft
);
10521 gen_load_fpr32(ctx
, fp2
, fr
);
10522 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10523 tcg_temp_free_i32(fp0
);
10524 tcg_temp_free_i32(fp1
);
10525 gen_store_fpr32(ctx
, fp2
, fd
);
10526 tcg_temp_free_i32(fp2
);
10531 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10533 TCGv_i64 fp0
= tcg_temp_new_i64();
10534 TCGv_i64 fp1
= tcg_temp_new_i64();
10535 TCGv_i64 fp2
= tcg_temp_new_i64();
10537 gen_load_fpr64(ctx
, fp0
, fs
);
10538 gen_load_fpr64(ctx
, fp1
, ft
);
10539 gen_load_fpr64(ctx
, fp2
, fr
);
10540 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10541 tcg_temp_free_i64(fp0
);
10542 tcg_temp_free_i64(fp1
);
10543 gen_store_fpr64(ctx
, fp2
, fd
);
10544 tcg_temp_free_i64(fp2
);
10550 TCGv_i64 fp0
= tcg_temp_new_i64();
10551 TCGv_i64 fp1
= tcg_temp_new_i64();
10552 TCGv_i64 fp2
= tcg_temp_new_i64();
10554 gen_load_fpr64(ctx
, fp0
, fs
);
10555 gen_load_fpr64(ctx
, fp1
, ft
);
10556 gen_load_fpr64(ctx
, fp2
, fr
);
10557 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10558 tcg_temp_free_i64(fp0
);
10559 tcg_temp_free_i64(fp1
);
10560 gen_store_fpr64(ctx
, fp2
, fd
);
10561 tcg_temp_free_i64(fp2
);
10567 TCGv_i32 fp0
= tcg_temp_new_i32();
10568 TCGv_i32 fp1
= tcg_temp_new_i32();
10569 TCGv_i32 fp2
= tcg_temp_new_i32();
10571 gen_load_fpr32(ctx
, fp0
, fs
);
10572 gen_load_fpr32(ctx
, fp1
, ft
);
10573 gen_load_fpr32(ctx
, fp2
, fr
);
10574 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10575 tcg_temp_free_i32(fp0
);
10576 tcg_temp_free_i32(fp1
);
10577 gen_store_fpr32(ctx
, fp2
, fd
);
10578 tcg_temp_free_i32(fp2
);
10583 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10585 TCGv_i64 fp0
= tcg_temp_new_i64();
10586 TCGv_i64 fp1
= tcg_temp_new_i64();
10587 TCGv_i64 fp2
= tcg_temp_new_i64();
10589 gen_load_fpr64(ctx
, fp0
, fs
);
10590 gen_load_fpr64(ctx
, fp1
, ft
);
10591 gen_load_fpr64(ctx
, fp2
, fr
);
10592 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10593 tcg_temp_free_i64(fp0
);
10594 tcg_temp_free_i64(fp1
);
10595 gen_store_fpr64(ctx
, fp2
, fd
);
10596 tcg_temp_free_i64(fp2
);
10602 TCGv_i64 fp0
= tcg_temp_new_i64();
10603 TCGv_i64 fp1
= tcg_temp_new_i64();
10604 TCGv_i64 fp2
= tcg_temp_new_i64();
10606 gen_load_fpr64(ctx
, fp0
, fs
);
10607 gen_load_fpr64(ctx
, fp1
, ft
);
10608 gen_load_fpr64(ctx
, fp2
, fr
);
10609 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10610 tcg_temp_free_i64(fp0
);
10611 tcg_temp_free_i64(fp1
);
10612 gen_store_fpr64(ctx
, fp2
, fd
);
10613 tcg_temp_free_i64(fp2
);
10619 TCGv_i32 fp0
= tcg_temp_new_i32();
10620 TCGv_i32 fp1
= tcg_temp_new_i32();
10621 TCGv_i32 fp2
= tcg_temp_new_i32();
10623 gen_load_fpr32(ctx
, fp0
, fs
);
10624 gen_load_fpr32(ctx
, fp1
, ft
);
10625 gen_load_fpr32(ctx
, fp2
, fr
);
10626 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10627 tcg_temp_free_i32(fp0
);
10628 tcg_temp_free_i32(fp1
);
10629 gen_store_fpr32(ctx
, fp2
, fd
);
10630 tcg_temp_free_i32(fp2
);
10635 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10637 TCGv_i64 fp0
= tcg_temp_new_i64();
10638 TCGv_i64 fp1
= tcg_temp_new_i64();
10639 TCGv_i64 fp2
= tcg_temp_new_i64();
10641 gen_load_fpr64(ctx
, fp0
, fs
);
10642 gen_load_fpr64(ctx
, fp1
, ft
);
10643 gen_load_fpr64(ctx
, fp2
, fr
);
10644 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10645 tcg_temp_free_i64(fp0
);
10646 tcg_temp_free_i64(fp1
);
10647 gen_store_fpr64(ctx
, fp2
, fd
);
10648 tcg_temp_free_i64(fp2
);
10654 TCGv_i64 fp0
= tcg_temp_new_i64();
10655 TCGv_i64 fp1
= tcg_temp_new_i64();
10656 TCGv_i64 fp2
= tcg_temp_new_i64();
10658 gen_load_fpr64(ctx
, fp0
, fs
);
10659 gen_load_fpr64(ctx
, fp1
, ft
);
10660 gen_load_fpr64(ctx
, fp2
, fr
);
10661 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10662 tcg_temp_free_i64(fp0
);
10663 tcg_temp_free_i64(fp1
);
10664 gen_store_fpr64(ctx
, fp2
, fd
);
10665 tcg_temp_free_i64(fp2
);
10671 TCGv_i32 fp0
= tcg_temp_new_i32();
10672 TCGv_i32 fp1
= tcg_temp_new_i32();
10673 TCGv_i32 fp2
= tcg_temp_new_i32();
10675 gen_load_fpr32(ctx
, fp0
, fs
);
10676 gen_load_fpr32(ctx
, fp1
, ft
);
10677 gen_load_fpr32(ctx
, fp2
, fr
);
10678 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10679 tcg_temp_free_i32(fp0
);
10680 tcg_temp_free_i32(fp1
);
10681 gen_store_fpr32(ctx
, fp2
, fd
);
10682 tcg_temp_free_i32(fp2
);
10687 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10689 TCGv_i64 fp0
= tcg_temp_new_i64();
10690 TCGv_i64 fp1
= tcg_temp_new_i64();
10691 TCGv_i64 fp2
= tcg_temp_new_i64();
10693 gen_load_fpr64(ctx
, fp0
, fs
);
10694 gen_load_fpr64(ctx
, fp1
, ft
);
10695 gen_load_fpr64(ctx
, fp2
, fr
);
10696 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10697 tcg_temp_free_i64(fp0
);
10698 tcg_temp_free_i64(fp1
);
10699 gen_store_fpr64(ctx
, fp2
, fd
);
10700 tcg_temp_free_i64(fp2
);
10706 TCGv_i64 fp0
= tcg_temp_new_i64();
10707 TCGv_i64 fp1
= tcg_temp_new_i64();
10708 TCGv_i64 fp2
= tcg_temp_new_i64();
10710 gen_load_fpr64(ctx
, fp0
, fs
);
10711 gen_load_fpr64(ctx
, fp1
, ft
);
10712 gen_load_fpr64(ctx
, fp2
, fr
);
10713 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10714 tcg_temp_free_i64(fp0
);
10715 tcg_temp_free_i64(fp1
);
10716 gen_store_fpr64(ctx
, fp2
, fd
);
10717 tcg_temp_free_i64(fp2
);
10721 MIPS_INVAL("flt3_arith");
10722 generate_exception_end(ctx
, EXCP_RI
);
10727 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10731 #if !defined(CONFIG_USER_ONLY)
10732 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10733 Therefore only check the ISA in system mode. */
10734 check_insn(ctx
, ISA_MIPS32R2
);
10736 t0
= tcg_temp_new();
10740 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10741 gen_store_gpr(t0
, rt
);
10744 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10745 gen_store_gpr(t0
, rt
);
10748 gen_helper_rdhwr_cc(t0
, cpu_env
);
10749 gen_store_gpr(t0
, rt
);
10752 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10753 gen_store_gpr(t0
, rt
);
10756 check_insn(ctx
, ISA_MIPS32R6
);
10758 /* Performance counter registers are not implemented other than
10759 * control register 0.
10761 generate_exception(ctx
, EXCP_RI
);
10763 gen_helper_rdhwr_performance(t0
, cpu_env
);
10764 gen_store_gpr(t0
, rt
);
10767 check_insn(ctx
, ISA_MIPS32R6
);
10768 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10769 gen_store_gpr(t0
, rt
);
10772 #if defined(CONFIG_USER_ONLY)
10773 tcg_gen_ld_tl(t0
, cpu_env
,
10774 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10775 gen_store_gpr(t0
, rt
);
10778 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10779 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10780 tcg_gen_ld_tl(t0
, cpu_env
,
10781 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10782 gen_store_gpr(t0
, rt
);
10784 generate_exception_end(ctx
, EXCP_RI
);
10788 default: /* Invalid */
10789 MIPS_INVAL("rdhwr");
10790 generate_exception_end(ctx
, EXCP_RI
);
10796 static inline void clear_branch_hflags(DisasContext
*ctx
)
10798 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10799 if (ctx
->bstate
== BS_NONE
) {
10800 save_cpu_state(ctx
, 0);
10802 /* it is not safe to save ctx->hflags as hflags may be changed
10803 in execution time by the instruction in delay / forbidden slot. */
10804 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10808 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10810 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10811 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10812 /* Branches completion */
10813 clear_branch_hflags(ctx
);
10814 ctx
->bstate
= BS_BRANCH
;
10815 /* FIXME: Need to clear can_do_io. */
10816 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10817 case MIPS_HFLAG_FBNSLOT
:
10818 gen_goto_tb(ctx
, 0, ctx
->pc
+ insn_bytes
);
10821 /* unconditional branch */
10822 if (proc_hflags
& MIPS_HFLAG_BX
) {
10823 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10825 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10827 case MIPS_HFLAG_BL
:
10828 /* blikely taken case */
10829 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10831 case MIPS_HFLAG_BC
:
10832 /* Conditional branch */
10834 TCGLabel
*l1
= gen_new_label();
10836 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10837 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
10839 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10842 case MIPS_HFLAG_BR
:
10843 /* unconditional branch to register */
10844 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10845 TCGv t0
= tcg_temp_new();
10846 TCGv_i32 t1
= tcg_temp_new_i32();
10848 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10849 tcg_gen_trunc_tl_i32(t1
, t0
);
10851 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10852 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10853 tcg_gen_or_i32(hflags
, hflags
, t1
);
10854 tcg_temp_free_i32(t1
);
10856 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10858 tcg_gen_mov_tl(cpu_PC
, btarget
);
10860 if (ctx
->singlestep_enabled
) {
10861 save_cpu_state(ctx
, 0);
10862 gen_helper_raise_exception_debug(cpu_env
);
10864 tcg_gen_lookup_and_goto_ptr(cpu_PC
);
10867 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10873 /* Compact Branches */
10874 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10875 int rs
, int rt
, int32_t offset
)
10877 int bcond_compute
= 0;
10878 TCGv t0
= tcg_temp_new();
10879 TCGv t1
= tcg_temp_new();
10880 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10882 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10883 #ifdef MIPS_DEBUG_DISAS
10884 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10887 generate_exception_end(ctx
, EXCP_RI
);
10891 /* Load needed operands and calculate btarget */
10893 /* compact branch */
10894 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10895 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10896 gen_load_gpr(t0
, rs
);
10897 gen_load_gpr(t1
, rt
);
10899 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10900 if (rs
<= rt
&& rs
== 0) {
10901 /* OPC_BEQZALC, OPC_BNEZALC */
10902 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10905 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10906 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10907 gen_load_gpr(t0
, rs
);
10908 gen_load_gpr(t1
, rt
);
10910 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10912 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10913 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10914 if (rs
== 0 || rs
== rt
) {
10915 /* OPC_BLEZALC, OPC_BGEZALC */
10916 /* OPC_BGTZALC, OPC_BLTZALC */
10917 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10919 gen_load_gpr(t0
, rs
);
10920 gen_load_gpr(t1
, rt
);
10922 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10926 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10931 /* OPC_BEQZC, OPC_BNEZC */
10932 gen_load_gpr(t0
, rs
);
10934 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10936 /* OPC_JIC, OPC_JIALC */
10937 TCGv tbase
= tcg_temp_new();
10938 TCGv toffset
= tcg_temp_new();
10940 gen_load_gpr(tbase
, rt
);
10941 tcg_gen_movi_tl(toffset
, offset
);
10942 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10943 tcg_temp_free(tbase
);
10944 tcg_temp_free(toffset
);
10948 MIPS_INVAL("Compact branch/jump");
10949 generate_exception_end(ctx
, EXCP_RI
);
10953 if (bcond_compute
== 0) {
10954 /* Uncoditional compact branch */
10957 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10960 ctx
->hflags
|= MIPS_HFLAG_BR
;
10963 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10966 ctx
->hflags
|= MIPS_HFLAG_B
;
10969 MIPS_INVAL("Compact branch/jump");
10970 generate_exception_end(ctx
, EXCP_RI
);
10974 /* Generating branch here as compact branches don't have delay slot */
10975 gen_branch(ctx
, 4);
10977 /* Conditional compact branch */
10978 TCGLabel
*fs
= gen_new_label();
10979 save_cpu_state(ctx
, 0);
10982 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10983 if (rs
== 0 && rt
!= 0) {
10985 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10986 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10988 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10991 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10994 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10995 if (rs
== 0 && rt
!= 0) {
10997 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10998 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11000 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11003 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
11006 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11007 if (rs
== 0 && rt
!= 0) {
11009 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11010 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11012 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11015 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
11018 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11019 if (rs
== 0 && rt
!= 0) {
11021 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11022 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11024 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11027 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
11030 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11031 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11033 /* OPC_BOVC, OPC_BNVC */
11034 TCGv t2
= tcg_temp_new();
11035 TCGv t3
= tcg_temp_new();
11036 TCGv t4
= tcg_temp_new();
11037 TCGv input_overflow
= tcg_temp_new();
11039 gen_load_gpr(t0
, rs
);
11040 gen_load_gpr(t1
, rt
);
11041 tcg_gen_ext32s_tl(t2
, t0
);
11042 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
11043 tcg_gen_ext32s_tl(t3
, t1
);
11044 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
11045 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
11047 tcg_gen_add_tl(t4
, t2
, t3
);
11048 tcg_gen_ext32s_tl(t4
, t4
);
11049 tcg_gen_xor_tl(t2
, t2
, t3
);
11050 tcg_gen_xor_tl(t3
, t4
, t3
);
11051 tcg_gen_andc_tl(t2
, t3
, t2
);
11052 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
11053 tcg_gen_or_tl(t4
, t4
, input_overflow
);
11054 if (opc
== OPC_BOVC
) {
11056 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
11059 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
11061 tcg_temp_free(input_overflow
);
11065 } else if (rs
< rt
&& rs
== 0) {
11066 /* OPC_BEQZALC, OPC_BNEZALC */
11067 if (opc
== OPC_BEQZALC
) {
11069 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
11072 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
11075 /* OPC_BEQC, OPC_BNEC */
11076 if (opc
== OPC_BEQC
) {
11078 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
11081 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
11086 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
11089 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
11092 MIPS_INVAL("Compact conditional branch/jump");
11093 generate_exception_end(ctx
, EXCP_RI
);
11097 /* Generating branch here as compact branches don't have delay slot */
11098 gen_goto_tb(ctx
, 1, ctx
->btarget
);
11101 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
11109 /* ISA extensions (ASEs) */
11110 /* MIPS16 extension to MIPS32 */
11112 /* MIPS16 major opcodes */
11114 M16_OPC_ADDIUSP
= 0x00,
11115 M16_OPC_ADDIUPC
= 0x01,
11117 M16_OPC_JAL
= 0x03,
11118 M16_OPC_BEQZ
= 0x04,
11119 M16_OPC_BNEQZ
= 0x05,
11120 M16_OPC_SHIFT
= 0x06,
11122 M16_OPC_RRIA
= 0x08,
11123 M16_OPC_ADDIU8
= 0x09,
11124 M16_OPC_SLTI
= 0x0a,
11125 M16_OPC_SLTIU
= 0x0b,
11128 M16_OPC_CMPI
= 0x0e,
11132 M16_OPC_LWSP
= 0x12,
11134 M16_OPC_LBU
= 0x14,
11135 M16_OPC_LHU
= 0x15,
11136 M16_OPC_LWPC
= 0x16,
11137 M16_OPC_LWU
= 0x17,
11140 M16_OPC_SWSP
= 0x1a,
11142 M16_OPC_RRR
= 0x1c,
11144 M16_OPC_EXTEND
= 0x1e,
11148 /* I8 funct field */
11167 /* RR funct field */
11201 /* I64 funct field */
11209 I64_DADDIUPC
= 0x6,
11213 /* RR ry field for CNVT */
11215 RR_RY_CNVT_ZEB
= 0x0,
11216 RR_RY_CNVT_ZEH
= 0x1,
11217 RR_RY_CNVT_ZEW
= 0x2,
11218 RR_RY_CNVT_SEB
= 0x4,
11219 RR_RY_CNVT_SEH
= 0x5,
11220 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register field into the architectural GPR
 * number: encodings 0 and 1 select $16/$17 (s0/s1), encodings 2..7
 * select $2..$7 directly.
 *
 * As shown, the function body had no return statement (undefined
 * behavior for a value-returning function); restore it.  The lookup
 * table is immutable, so declare it const.
 *
 * @r: 3-bit register field (0..7); values outside that range are
 *     out-of-bounds and must not be passed by callers.
 */
static int xlat(int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
11230 static void gen_mips16_save (DisasContext
*ctx
,
11231 int xsregs
, int aregs
,
11232 int do_ra
, int do_s0
, int do_s1
,
11235 TCGv t0
= tcg_temp_new();
11236 TCGv t1
= tcg_temp_new();
11237 TCGv t2
= tcg_temp_new();
11267 generate_exception_end(ctx
, EXCP_RI
);
11273 gen_base_offset_addr(ctx
, t0
, 29, 12);
11274 gen_load_gpr(t1
, 7);
11275 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11278 gen_base_offset_addr(ctx
, t0
, 29, 8);
11279 gen_load_gpr(t1
, 6);
11280 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11283 gen_base_offset_addr(ctx
, t0
, 29, 4);
11284 gen_load_gpr(t1
, 5);
11285 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11288 gen_base_offset_addr(ctx
, t0
, 29, 0);
11289 gen_load_gpr(t1
, 4);
11290 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11293 gen_load_gpr(t0
, 29);
11295 #define DECR_AND_STORE(reg) do { \
11296 tcg_gen_movi_tl(t2, -4); \
11297 gen_op_addr_add(ctx, t0, t0, t2); \
11298 gen_load_gpr(t1, reg); \
11299 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11303 DECR_AND_STORE(31);
11308 DECR_AND_STORE(30);
11311 DECR_AND_STORE(23);
11314 DECR_AND_STORE(22);
11317 DECR_AND_STORE(21);
11320 DECR_AND_STORE(20);
11323 DECR_AND_STORE(19);
11326 DECR_AND_STORE(18);
11330 DECR_AND_STORE(17);
11333 DECR_AND_STORE(16);
11363 generate_exception_end(ctx
, EXCP_RI
);
11379 #undef DECR_AND_STORE
11381 tcg_gen_movi_tl(t2
, -framesize
);
11382 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11388 static void gen_mips16_restore (DisasContext
*ctx
,
11389 int xsregs
, int aregs
,
11390 int do_ra
, int do_s0
, int do_s1
,
11394 TCGv t0
= tcg_temp_new();
11395 TCGv t1
= tcg_temp_new();
11396 TCGv t2
= tcg_temp_new();
11398 tcg_gen_movi_tl(t2
, framesize
);
11399 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11401 #define DECR_AND_LOAD(reg) do { \
11402 tcg_gen_movi_tl(t2, -4); \
11403 gen_op_addr_add(ctx, t0, t0, t2); \
11404 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11405 gen_store_gpr(t1, reg); \
11469 generate_exception_end(ctx
, EXCP_RI
);
11485 #undef DECR_AND_LOAD
11487 tcg_gen_movi_tl(t2
, framesize
);
11488 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11494 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11495 int is_64_bit
, int extended
)
11499 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11500 generate_exception_end(ctx
, EXCP_RI
);
11504 t0
= tcg_temp_new();
11506 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11507 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11509 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11515 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11518 TCGv_i32 t0
= tcg_const_i32(op
);
11519 TCGv t1
= tcg_temp_new();
11520 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11521 gen_helper_cache(cpu_env
, t1
, t0
);
11524 #if defined(TARGET_MIPS64)
11525 static void decode_i64_mips16 (DisasContext
*ctx
,
11526 int ry
, int funct
, int16_t offset
,
11531 check_insn(ctx
, ISA_MIPS3
);
11532 check_mips_64(ctx
);
11533 offset
= extended
? offset
: offset
<< 3;
11534 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11537 check_insn(ctx
, ISA_MIPS3
);
11538 check_mips_64(ctx
);
11539 offset
= extended
? offset
: offset
<< 3;
11540 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11543 check_insn(ctx
, ISA_MIPS3
);
11544 check_mips_64(ctx
);
11545 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11546 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11549 check_insn(ctx
, ISA_MIPS3
);
11550 check_mips_64(ctx
);
11551 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11552 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11555 check_insn(ctx
, ISA_MIPS3
);
11556 check_mips_64(ctx
);
11557 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11558 generate_exception_end(ctx
, EXCP_RI
);
11560 offset
= extended
? offset
: offset
<< 3;
11561 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11565 check_insn(ctx
, ISA_MIPS3
);
11566 check_mips_64(ctx
);
11567 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11568 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11571 check_insn(ctx
, ISA_MIPS3
);
11572 check_mips_64(ctx
);
11573 offset
= extended
? offset
: offset
<< 2;
11574 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11577 check_insn(ctx
, ISA_MIPS3
);
11578 check_mips_64(ctx
);
11579 offset
= extended
? offset
: offset
<< 2;
11580 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11586 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11588 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11589 int op
, rx
, ry
, funct
, sa
;
11590 int16_t imm
, offset
;
11592 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11593 op
= (ctx
->opcode
>> 11) & 0x1f;
11594 sa
= (ctx
->opcode
>> 22) & 0x1f;
11595 funct
= (ctx
->opcode
>> 8) & 0x7;
11596 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11597 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11598 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11599 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11600 | (ctx
->opcode
& 0x1f));
11602 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11605 case M16_OPC_ADDIUSP
:
11606 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11608 case M16_OPC_ADDIUPC
:
11609 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11612 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11613 /* No delay slot, so just process as a normal instruction */
11616 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11617 /* No delay slot, so just process as a normal instruction */
11619 case M16_OPC_BNEQZ
:
11620 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11621 /* No delay slot, so just process as a normal instruction */
11623 case M16_OPC_SHIFT
:
11624 switch (ctx
->opcode
& 0x3) {
11626 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11629 #if defined(TARGET_MIPS64)
11630 check_mips_64(ctx
);
11631 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11633 generate_exception_end(ctx
, EXCP_RI
);
11637 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11640 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11644 #if defined(TARGET_MIPS64)
11646 check_insn(ctx
, ISA_MIPS3
);
11647 check_mips_64(ctx
);
11648 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11652 imm
= ctx
->opcode
& 0xf;
11653 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11654 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11655 imm
= (int16_t) (imm
<< 1) >> 1;
11656 if ((ctx
->opcode
>> 4) & 0x1) {
11657 #if defined(TARGET_MIPS64)
11658 check_mips_64(ctx
);
11659 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11661 generate_exception_end(ctx
, EXCP_RI
);
11664 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11667 case M16_OPC_ADDIU8
:
11668 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11671 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11673 case M16_OPC_SLTIU
:
11674 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11679 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11682 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11685 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11688 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11691 check_insn(ctx
, ISA_MIPS32
);
11693 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11694 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11695 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11696 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11697 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11698 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11699 | (ctx
->opcode
& 0xf)) << 3;
11701 if (ctx
->opcode
& (1 << 7)) {
11702 gen_mips16_save(ctx
, xsregs
, aregs
,
11703 do_ra
, do_s0
, do_s1
,
11706 gen_mips16_restore(ctx
, xsregs
, aregs
,
11707 do_ra
, do_s0
, do_s1
,
11713 generate_exception_end(ctx
, EXCP_RI
);
11718 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11721 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11723 #if defined(TARGET_MIPS64)
11725 check_insn(ctx
, ISA_MIPS3
);
11726 check_mips_64(ctx
);
11727 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11731 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11734 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11737 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11740 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11743 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11746 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11749 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11751 #if defined(TARGET_MIPS64)
11753 check_insn(ctx
, ISA_MIPS3
);
11754 check_mips_64(ctx
);
11755 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11759 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11762 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11765 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11768 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11770 #if defined(TARGET_MIPS64)
11772 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11776 generate_exception_end(ctx
, EXCP_RI
);
11783 static inline bool is_uhi(int sdbbp_code
)
11785 #ifdef CONFIG_USER_ONLY
11788 return semihosting_enabled() && sdbbp_code
== 1;
11792 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11796 int op
, cnvt_op
, op1
, offset
;
11800 op
= (ctx
->opcode
>> 11) & 0x1f;
11801 sa
= (ctx
->opcode
>> 2) & 0x7;
11802 sa
= sa
== 0 ? 8 : sa
;
11803 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11804 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11805 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11806 op1
= offset
= ctx
->opcode
& 0x1f;
11811 case M16_OPC_ADDIUSP
:
11813 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11815 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11818 case M16_OPC_ADDIUPC
:
11819 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11822 offset
= (ctx
->opcode
& 0x7ff) << 1;
11823 offset
= (int16_t)(offset
<< 4) >> 4;
11824 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11825 /* No delay slot, so just process as a normal instruction */
11828 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11829 offset
= (((ctx
->opcode
& 0x1f) << 21)
11830 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11832 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11833 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11837 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11838 ((int8_t)ctx
->opcode
) << 1, 0);
11839 /* No delay slot, so just process as a normal instruction */
11841 case M16_OPC_BNEQZ
:
11842 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11843 ((int8_t)ctx
->opcode
) << 1, 0);
11844 /* No delay slot, so just process as a normal instruction */
11846 case M16_OPC_SHIFT
:
11847 switch (ctx
->opcode
& 0x3) {
11849 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11852 #if defined(TARGET_MIPS64)
11853 check_insn(ctx
, ISA_MIPS3
);
11854 check_mips_64(ctx
);
11855 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11857 generate_exception_end(ctx
, EXCP_RI
);
11861 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11864 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11868 #if defined(TARGET_MIPS64)
11870 check_insn(ctx
, ISA_MIPS3
);
11871 check_mips_64(ctx
);
11872 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11877 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11879 if ((ctx
->opcode
>> 4) & 1) {
11880 #if defined(TARGET_MIPS64)
11881 check_insn(ctx
, ISA_MIPS3
);
11882 check_mips_64(ctx
);
11883 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11885 generate_exception_end(ctx
, EXCP_RI
);
11888 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11892 case M16_OPC_ADDIU8
:
11894 int16_t imm
= (int8_t) ctx
->opcode
;
11896 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11901 int16_t imm
= (uint8_t) ctx
->opcode
;
11902 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11905 case M16_OPC_SLTIU
:
11907 int16_t imm
= (uint8_t) ctx
->opcode
;
11908 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11915 funct
= (ctx
->opcode
>> 8) & 0x7;
11918 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11919 ((int8_t)ctx
->opcode
) << 1, 0);
11922 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11923 ((int8_t)ctx
->opcode
) << 1, 0);
11926 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11929 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11930 ((int8_t)ctx
->opcode
) << 3);
11933 check_insn(ctx
, ISA_MIPS32
);
11935 int do_ra
= ctx
->opcode
& (1 << 6);
11936 int do_s0
= ctx
->opcode
& (1 << 5);
11937 int do_s1
= ctx
->opcode
& (1 << 4);
11938 int framesize
= ctx
->opcode
& 0xf;
11940 if (framesize
== 0) {
11943 framesize
= framesize
<< 3;
11946 if (ctx
->opcode
& (1 << 7)) {
11947 gen_mips16_save(ctx
, 0, 0,
11948 do_ra
, do_s0
, do_s1
, framesize
);
11950 gen_mips16_restore(ctx
, 0, 0,
11951 do_ra
, do_s0
, do_s1
, framesize
);
11957 int rz
= xlat(ctx
->opcode
& 0x7);
11959 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11960 ((ctx
->opcode
>> 5) & 0x7);
11961 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11965 reg32
= ctx
->opcode
& 0x1f;
11966 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11969 generate_exception_end(ctx
, EXCP_RI
);
11976 int16_t imm
= (uint8_t) ctx
->opcode
;
11978 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11983 int16_t imm
= (uint8_t) ctx
->opcode
;
11984 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11987 #if defined(TARGET_MIPS64)
11989 check_insn(ctx
, ISA_MIPS3
);
11990 check_mips_64(ctx
);
11991 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11995 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11998 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
12001 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12004 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
12007 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
12010 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
12013 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
12015 #if defined (TARGET_MIPS64)
12017 check_insn(ctx
, ISA_MIPS3
);
12018 check_mips_64(ctx
);
12019 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
12023 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
12026 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
12029 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
12032 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
12036 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
12039 switch (ctx
->opcode
& 0x3) {
12041 mips32_op
= OPC_ADDU
;
12044 mips32_op
= OPC_SUBU
;
12046 #if defined(TARGET_MIPS64)
12048 mips32_op
= OPC_DADDU
;
12049 check_insn(ctx
, ISA_MIPS3
);
12050 check_mips_64(ctx
);
12053 mips32_op
= OPC_DSUBU
;
12054 check_insn(ctx
, ISA_MIPS3
);
12055 check_mips_64(ctx
);
12059 generate_exception_end(ctx
, EXCP_RI
);
12063 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
12072 int nd
= (ctx
->opcode
>> 7) & 0x1;
12073 int link
= (ctx
->opcode
>> 6) & 0x1;
12074 int ra
= (ctx
->opcode
>> 5) & 0x1;
12077 check_insn(ctx
, ISA_MIPS32
);
12086 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
12091 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
12092 gen_helper_do_semihosting(cpu_env
);
12094 /* XXX: not clear which exception should be raised
12095 * when in debug mode...
12097 check_insn(ctx
, ISA_MIPS32
);
12098 generate_exception_end(ctx
, EXCP_DBp
);
12102 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
12105 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
12108 generate_exception_end(ctx
, EXCP_BREAK
);
12111 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
12114 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
12117 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
12119 #if defined (TARGET_MIPS64)
12121 check_insn(ctx
, ISA_MIPS3
);
12122 check_mips_64(ctx
);
12123 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
12127 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12130 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12133 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12136 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12139 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12142 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12145 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12148 check_insn(ctx
, ISA_MIPS32
);
12150 case RR_RY_CNVT_ZEB
:
12151 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12153 case RR_RY_CNVT_ZEH
:
12154 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12156 case RR_RY_CNVT_SEB
:
12157 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12159 case RR_RY_CNVT_SEH
:
12160 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12162 #if defined (TARGET_MIPS64)
12163 case RR_RY_CNVT_ZEW
:
12164 check_insn(ctx
, ISA_MIPS64
);
12165 check_mips_64(ctx
);
12166 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12168 case RR_RY_CNVT_SEW
:
12169 check_insn(ctx
, ISA_MIPS64
);
12170 check_mips_64(ctx
);
12171 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12175 generate_exception_end(ctx
, EXCP_RI
);
12180 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12182 #if defined (TARGET_MIPS64)
12184 check_insn(ctx
, ISA_MIPS3
);
12185 check_mips_64(ctx
);
12186 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12189 check_insn(ctx
, ISA_MIPS3
);
12190 check_mips_64(ctx
);
12191 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12194 check_insn(ctx
, ISA_MIPS3
);
12195 check_mips_64(ctx
);
12196 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12199 check_insn(ctx
, ISA_MIPS3
);
12200 check_mips_64(ctx
);
12201 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12205 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12208 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12211 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12214 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12216 #if defined (TARGET_MIPS64)
12218 check_insn(ctx
, ISA_MIPS3
);
12219 check_mips_64(ctx
);
12220 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12223 check_insn(ctx
, ISA_MIPS3
);
12224 check_mips_64(ctx
);
12225 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12228 check_insn(ctx
, ISA_MIPS3
);
12229 check_mips_64(ctx
);
12230 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12233 check_insn(ctx
, ISA_MIPS3
);
12234 check_mips_64(ctx
);
12235 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12239 generate_exception_end(ctx
, EXCP_RI
);
12243 case M16_OPC_EXTEND
:
12244 decode_extended_mips16_opc(env
, ctx
);
12247 #if defined(TARGET_MIPS64)
12249 funct
= (ctx
->opcode
>> 8) & 0x7;
12250 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12254 generate_exception_end(ctx
, EXCP_RI
);
12261 /* microMIPS extension to MIPS32/MIPS64 */
12264 * microMIPS32/microMIPS64 major opcodes
12266 * 1. MIPS Architecture for Programmers Volume II-B:
12267 * The microMIPS32 Instruction Set (Revision 3.05)
12269 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12271 * 2. MIPS Architecture For Programmers Volume II-A:
12272 * The MIPS64 Instruction Set (Revision 3.51)
12302 POOL32S
= 0x16, /* MIPS64 */
12303 DADDIU32
= 0x17, /* MIPS64 */
12332 /* 0x29 is reserved */
12345 /* 0x31 is reserved */
12358 SD32
= 0x36, /* MIPS64 */
12359 LD32
= 0x37, /* MIPS64 */
12361 /* 0x39 is reserved */
12377 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12387 /* POOL32A encoding of minor opcode field */
12390 /* These opcodes are distinguished only by bits 9..6; those bits are
12391 * what are recorded below. */
12428 /* The following can be distinguished by their lower 6 bits. */
12438 /* POOL32AXF encoding of minor opcode field extension */
12441 * 1. MIPS Architecture for Programmers Volume II-B:
12442 * The microMIPS32 Instruction Set (Revision 3.05)
12444 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12446 * 2. MIPS Architecture for Programmers VolumeIV-e:
12447 * The MIPS DSP Application-Specific Extension
12448 * to the microMIPS32 Architecture (Revision 2.34)
12450 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12465 /* begin of microMIPS32 DSP */
12467 /* bits 13..12 for 0x01 */
12473 /* bits 13..12 for 0x2a */
12479 /* bits 13..12 for 0x32 */
12483 /* end of microMIPS32 DSP */
12485 /* bits 15..12 for 0x2c */
12502 /* bits 15..12 for 0x34 */
12510 /* bits 15..12 for 0x3c */
12512 JR
= 0x0, /* alias */
12520 /* bits 15..12 for 0x05 */
12524 /* bits 15..12 for 0x0d */
12536 /* bits 15..12 for 0x15 */
12542 /* bits 15..12 for 0x1d */
12546 /* bits 15..12 for 0x2d */
12551 /* bits 15..12 for 0x35 */
12558 /* POOL32B encoding of minor opcode field (bits 15..12) */
12574 /* POOL32C encoding of minor opcode field (bits 15..12) */
12595 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
12608 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
12621 /* POOL32F encoding of minor opcode field (bits 5..0) */
12624 /* These are the bit 7..6 values */
12633 /* These are the bit 8..6 values */
12658 MOVZ_FMT_05
= 0x05,
12692 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12699 /* POOL32Fxf encoding of minor opcode extension field */
12737 /* POOL32I encoding of minor opcode field (bits 25..21) */
12767 /* These overlap and are distinguished by bit16 of the instruction */
12776 /* POOL16A encoding of minor opcode field */
12783 /* POOL16B encoding of minor opcode field */
12790 /* POOL16C encoding of minor opcode field */
12810 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12830 /* POOL16D encoding of minor opcode field */
12837 /* POOL16E encoding of minor opcode field */
12844 static int mmreg (int r
)
12846 static const int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
12851 /* Used for 16-bit store instructions. */
12852 static int mmreg2 (int r
)
12854 static const int map
[] = { 0, 17, 2, 3, 4, 5, 6, 7 };
12859 #define uMIPS_RD(op) ((op >> 7) & 0x7)
12860 #define uMIPS_RS(op) ((op >> 4) & 0x7)
12861 #define uMIPS_RS2(op) uMIPS_RS(op)
12862 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
12863 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12864 #define uMIPS_RS5(op) (op & 0x1f)
12866 /* Signed immediate */
12867 #define SIMM(op, start, width) \
12868 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
12871 /* Zero-extended immediate */
12872 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12874 static void gen_addiur1sp(DisasContext
*ctx
)
12876 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12878 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12881 static void gen_addiur2(DisasContext
*ctx
)
12883 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12884 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12885 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12887 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12890 static void gen_addiusp(DisasContext
*ctx
)
12892 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12895 if (encoded
<= 1) {
12896 decoded
= 256 + encoded
;
12897 } else if (encoded
<= 255) {
12899 } else if (encoded
<= 509) {
12900 decoded
= encoded
- 512;
12902 decoded
= encoded
- 768;
12905 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12908 static void gen_addius5(DisasContext
*ctx
)
12910 int imm
= SIMM(ctx
->opcode
, 1, 4);
12911 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12913 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12916 static void gen_andi16(DisasContext
*ctx
)
12918 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12919 31, 32, 63, 64, 255, 32768, 65535 };
12920 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12921 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12922 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12924 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12927 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12928 int base
, int16_t offset
)
12933 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12934 generate_exception_end(ctx
, EXCP_RI
);
12938 t0
= tcg_temp_new();
12940 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12942 t1
= tcg_const_tl(reglist
);
12943 t2
= tcg_const_i32(ctx
->mem_idx
);
12945 save_cpu_state(ctx
, 1);
12948 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12951 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12953 #ifdef TARGET_MIPS64
12955 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12958 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12964 tcg_temp_free_i32(t2
);
12968 static void gen_pool16c_insn(DisasContext
*ctx
)
12970 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12971 int rs
= mmreg(ctx
->opcode
& 0x7);
12973 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12978 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12984 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12990 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12996 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
13003 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13004 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13006 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
13015 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
13016 int offset
= ZIMM(ctx
->opcode
, 0, 4);
13018 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
13025 int reg
= ctx
->opcode
& 0x1f;
13027 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
13033 int reg
= ctx
->opcode
& 0x1f;
13034 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
13035 /* Let normal delay slot handling in our caller take us
13036 to the branch target. */
13041 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
13042 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13046 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
13047 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13051 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
13055 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
13058 generate_exception_end(ctx
, EXCP_BREAK
);
13061 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
13062 gen_helper_do_semihosting(cpu_env
);
13064 /* XXX: not clear which exception should be raised
13065 * when in debug mode...
13067 check_insn(ctx
, ISA_MIPS32
);
13068 generate_exception_end(ctx
, EXCP_DBp
);
13071 case JRADDIUSP
+ 0:
13072 case JRADDIUSP
+ 1:
13074 int imm
= ZIMM(ctx
->opcode
, 0, 5);
13075 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13076 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13077 /* Let normal delay slot handling in our caller take us
13078 to the branch target. */
13082 generate_exception_end(ctx
, EXCP_RI
);
13087 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
13090 int rd
, rs
, re
, rt
;
13091 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
13092 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
13093 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
13094 rd
= rd_enc
[enc_dest
];
13095 re
= re_enc
[enc_dest
];
13096 rs
= rs_rt_enc
[enc_rs
];
13097 rt
= rs_rt_enc
[enc_rt
];
13099 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
13101 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
13104 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
13106 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
13110 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
13112 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
13113 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
13115 switch (ctx
->opcode
& 0xf) {
13117 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
13120 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
13124 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13125 int offset
= extract32(ctx
->opcode
, 4, 4);
13126 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
13129 case R6_JRC16
: /* JRCADDIUSP */
13130 if ((ctx
->opcode
>> 4) & 1) {
13132 int imm
= extract32(ctx
->opcode
, 5, 5);
13133 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
13134 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
13137 int rs
= extract32(ctx
->opcode
, 5, 5);
13138 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
13141 case MOVEP
... MOVEP_07
:
13142 case MOVEP_0C
... MOVEP_0F
:
13144 int enc_dest
= uMIPS_RD(ctx
->opcode
);
13145 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
13146 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
13147 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
13151 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
13154 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13158 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13159 int offset
= extract32(ctx
->opcode
, 4, 4);
13160 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13163 case JALRC16
: /* BREAK16, SDBBP16 */
13164 switch (ctx
->opcode
& 0x3f) {
13166 case JALRC16
+ 0x20:
13168 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13173 generate_exception(ctx
, EXCP_BREAK
);
13177 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13178 gen_helper_do_semihosting(cpu_env
);
13180 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13181 generate_exception(ctx
, EXCP_RI
);
13183 generate_exception(ctx
, EXCP_DBp
);
13190 generate_exception(ctx
, EXCP_RI
);
13195 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
13197 TCGv t0
= tcg_temp_new();
13198 TCGv t1
= tcg_temp_new();
13200 gen_load_gpr(t0
, base
);
13203 gen_load_gpr(t1
, index
);
13204 tcg_gen_shli_tl(t1
, t1
, 2);
13205 gen_op_addr_add(ctx
, t0
, t1
, t0
);
13208 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13209 gen_store_gpr(t1
, rd
);
13215 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13216 int base
, int16_t offset
)
13220 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13221 generate_exception_end(ctx
, EXCP_RI
);
13225 t0
= tcg_temp_new();
13226 t1
= tcg_temp_new();
13228 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13233 generate_exception_end(ctx
, EXCP_RI
);
13236 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13237 gen_store_gpr(t1
, rd
);
13238 tcg_gen_movi_tl(t1
, 4);
13239 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13240 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13241 gen_store_gpr(t1
, rd
+1);
13244 gen_load_gpr(t1
, rd
);
13245 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13246 tcg_gen_movi_tl(t1
, 4);
13247 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13248 gen_load_gpr(t1
, rd
+1);
13249 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13251 #ifdef TARGET_MIPS64
13254 generate_exception_end(ctx
, EXCP_RI
);
13257 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13258 gen_store_gpr(t1
, rd
);
13259 tcg_gen_movi_tl(t1
, 8);
13260 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13261 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13262 gen_store_gpr(t1
, rd
+1);
13265 gen_load_gpr(t1
, rd
);
13266 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13267 tcg_gen_movi_tl(t1
, 8);
13268 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13269 gen_load_gpr(t1
, rd
+1);
13270 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13278 static void gen_sync(int stype
)
13280 TCGBar tcg_mo
= TCG_BAR_SC
;
13283 case 0x4: /* SYNC_WMB */
13284 tcg_mo
|= TCG_MO_ST_ST
;
13286 case 0x10: /* SYNC_MB */
13287 tcg_mo
|= TCG_MO_ALL
;
13289 case 0x11: /* SYNC_ACQUIRE */
13290 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
13292 case 0x12: /* SYNC_RELEASE */
13293 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
13295 case 0x13: /* SYNC_RMB */
13296 tcg_mo
|= TCG_MO_LD_LD
;
13299 tcg_mo
|= TCG_MO_ALL
;
13303 tcg_gen_mb(tcg_mo
);
13306 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13308 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13309 int minor
= (ctx
->opcode
>> 12) & 0xf;
13310 uint32_t mips32_op
;
13312 switch (extension
) {
13314 mips32_op
= OPC_TEQ
;
13317 mips32_op
= OPC_TGE
;
13320 mips32_op
= OPC_TGEU
;
13323 mips32_op
= OPC_TLT
;
13326 mips32_op
= OPC_TLTU
;
13329 mips32_op
= OPC_TNE
;
13331 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13333 #ifndef CONFIG_USER_ONLY
13336 check_cp0_enabled(ctx
);
13338 /* Treat as NOP. */
13341 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13345 check_cp0_enabled(ctx
);
13347 TCGv t0
= tcg_temp_new();
13349 gen_load_gpr(t0
, rt
);
13350 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13356 switch (minor
& 3) {
13358 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13361 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13364 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13367 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13370 goto pool32axf_invalid
;
13374 switch (minor
& 3) {
13376 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13379 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13382 goto pool32axf_invalid
;
13388 check_insn(ctx
, ISA_MIPS32R6
);
13389 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13392 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13395 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13398 mips32_op
= OPC_CLO
;
13401 mips32_op
= OPC_CLZ
;
13403 check_insn(ctx
, ISA_MIPS32
);
13404 gen_cl(ctx
, mips32_op
, rt
, rs
);
13407 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13408 gen_rdhwr(ctx
, rt
, rs
, 0);
13411 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13414 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13415 mips32_op
= OPC_MULT
;
13418 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13419 mips32_op
= OPC_MULTU
;
13422 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13423 mips32_op
= OPC_DIV
;
13426 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13427 mips32_op
= OPC_DIVU
;
13430 check_insn(ctx
, ISA_MIPS32
);
13431 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13434 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13435 mips32_op
= OPC_MADD
;
13438 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13439 mips32_op
= OPC_MADDU
;
13442 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13443 mips32_op
= OPC_MSUB
;
13446 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13447 mips32_op
= OPC_MSUBU
;
13449 check_insn(ctx
, ISA_MIPS32
);
13450 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13453 goto pool32axf_invalid
;
13464 generate_exception_err(ctx
, EXCP_CpU
, 2);
13467 goto pool32axf_invalid
;
13472 case JALR
: /* JALRC */
13473 case JALR_HB
: /* JALRC_HB */
13474 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13475 /* JALRC, JALRC_HB */
13476 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13478 /* JALR, JALR_HB */
13479 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13480 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13485 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13486 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13487 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13490 goto pool32axf_invalid
;
13496 check_cp0_enabled(ctx
);
13497 check_insn(ctx
, ISA_MIPS32R2
);
13498 gen_load_srsgpr(rs
, rt
);
13501 check_cp0_enabled(ctx
);
13502 check_insn(ctx
, ISA_MIPS32R2
);
13503 gen_store_srsgpr(rs
, rt
);
13506 goto pool32axf_invalid
;
13509 #ifndef CONFIG_USER_ONLY
13513 mips32_op
= OPC_TLBP
;
13516 mips32_op
= OPC_TLBR
;
13519 mips32_op
= OPC_TLBWI
;
13522 mips32_op
= OPC_TLBWR
;
13525 mips32_op
= OPC_TLBINV
;
13528 mips32_op
= OPC_TLBINVF
;
13531 mips32_op
= OPC_WAIT
;
13534 mips32_op
= OPC_DERET
;
13537 mips32_op
= OPC_ERET
;
13539 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13542 goto pool32axf_invalid
;
13548 check_cp0_enabled(ctx
);
13550 TCGv t0
= tcg_temp_new();
13552 save_cpu_state(ctx
, 1);
13553 gen_helper_di(t0
, cpu_env
);
13554 gen_store_gpr(t0
, rs
);
13555 /* Stop translation as we may have switched the execution mode */
13556 ctx
->bstate
= BS_STOP
;
13561 check_cp0_enabled(ctx
);
13563 TCGv t0
= tcg_temp_new();
13565 save_cpu_state(ctx
, 1);
13566 gen_helper_ei(t0
, cpu_env
);
13567 gen_store_gpr(t0
, rs
);
13568 /* Stop translation as we may have switched the execution mode */
13569 ctx
->bstate
= BS_STOP
;
13574 goto pool32axf_invalid
;
13581 gen_sync(extract32(ctx
->opcode
, 16, 5));
13584 generate_exception_end(ctx
, EXCP_SYSCALL
);
13587 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13588 gen_helper_do_semihosting(cpu_env
);
13590 check_insn(ctx
, ISA_MIPS32
);
13591 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13592 generate_exception_end(ctx
, EXCP_RI
);
13594 generate_exception_end(ctx
, EXCP_DBp
);
13599 goto pool32axf_invalid
;
13603 switch (minor
& 3) {
13605 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13608 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13611 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13614 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13617 goto pool32axf_invalid
;
13621 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13624 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13627 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13630 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13633 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13636 goto pool32axf_invalid
;
13641 MIPS_INVAL("pool32axf");
13642 generate_exception_end(ctx
, EXCP_RI
);
13647 /* Values for microMIPS fmt field. Variable-width, depending on which
13648 formats the instruction supports. */
13667 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13669 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13670 uint32_t mips32_op
;
13672 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13673 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13674 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13676 switch (extension
) {
13677 case FLOAT_1BIT_FMT(CFC1
, 0):
13678 mips32_op
= OPC_CFC1
;
13680 case FLOAT_1BIT_FMT(CTC1
, 0):
13681 mips32_op
= OPC_CTC1
;
13683 case FLOAT_1BIT_FMT(MFC1
, 0):
13684 mips32_op
= OPC_MFC1
;
13686 case FLOAT_1BIT_FMT(MTC1
, 0):
13687 mips32_op
= OPC_MTC1
;
13689 case FLOAT_1BIT_FMT(MFHC1
, 0):
13690 mips32_op
= OPC_MFHC1
;
13692 case FLOAT_1BIT_FMT(MTHC1
, 0):
13693 mips32_op
= OPC_MTHC1
;
13695 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13698 /* Reciprocal square root */
13699 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13700 mips32_op
= OPC_RSQRT_S
;
13702 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13703 mips32_op
= OPC_RSQRT_D
;
13707 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13708 mips32_op
= OPC_SQRT_S
;
13710 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13711 mips32_op
= OPC_SQRT_D
;
13715 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13716 mips32_op
= OPC_RECIP_S
;
13718 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13719 mips32_op
= OPC_RECIP_D
;
13723 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13724 mips32_op
= OPC_FLOOR_L_S
;
13726 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13727 mips32_op
= OPC_FLOOR_L_D
;
13729 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13730 mips32_op
= OPC_FLOOR_W_S
;
13732 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13733 mips32_op
= OPC_FLOOR_W_D
;
13737 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13738 mips32_op
= OPC_CEIL_L_S
;
13740 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13741 mips32_op
= OPC_CEIL_L_D
;
13743 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13744 mips32_op
= OPC_CEIL_W_S
;
13746 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13747 mips32_op
= OPC_CEIL_W_D
;
13751 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13752 mips32_op
= OPC_TRUNC_L_S
;
13754 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13755 mips32_op
= OPC_TRUNC_L_D
;
13757 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13758 mips32_op
= OPC_TRUNC_W_S
;
13760 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13761 mips32_op
= OPC_TRUNC_W_D
;
13765 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13766 mips32_op
= OPC_ROUND_L_S
;
13768 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13769 mips32_op
= OPC_ROUND_L_D
;
13771 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13772 mips32_op
= OPC_ROUND_W_S
;
13774 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13775 mips32_op
= OPC_ROUND_W_D
;
13778 /* Integer to floating-point conversion */
13779 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13780 mips32_op
= OPC_CVT_L_S
;
13782 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13783 mips32_op
= OPC_CVT_L_D
;
13785 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13786 mips32_op
= OPC_CVT_W_S
;
13788 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13789 mips32_op
= OPC_CVT_W_D
;
13792 /* Paired-foo conversions */
13793 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13794 mips32_op
= OPC_CVT_S_PL
;
13796 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13797 mips32_op
= OPC_CVT_S_PU
;
13799 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13800 mips32_op
= OPC_CVT_PW_PS
;
13802 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13803 mips32_op
= OPC_CVT_PS_PW
;
13806 /* Floating-point moves */
13807 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13808 mips32_op
= OPC_MOV_S
;
13810 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13811 mips32_op
= OPC_MOV_D
;
13813 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13814 mips32_op
= OPC_MOV_PS
;
13817 /* Absolute value */
13818 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13819 mips32_op
= OPC_ABS_S
;
13821 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13822 mips32_op
= OPC_ABS_D
;
13824 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13825 mips32_op
= OPC_ABS_PS
;
13829 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13830 mips32_op
= OPC_NEG_S
;
13832 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13833 mips32_op
= OPC_NEG_D
;
13835 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13836 mips32_op
= OPC_NEG_PS
;
13839 /* Reciprocal square root step */
13840 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13841 mips32_op
= OPC_RSQRT1_S
;
13843 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13844 mips32_op
= OPC_RSQRT1_D
;
13846 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13847 mips32_op
= OPC_RSQRT1_PS
;
13850 /* Reciprocal step */
13851 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13852 mips32_op
= OPC_RECIP1_S
;
13854 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13855 mips32_op
= OPC_RECIP1_S
;
13857 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13858 mips32_op
= OPC_RECIP1_PS
;
13861 /* Conversions from double */
13862 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13863 mips32_op
= OPC_CVT_D_S
;
13865 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13866 mips32_op
= OPC_CVT_D_W
;
13868 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13869 mips32_op
= OPC_CVT_D_L
;
13872 /* Conversions from single */
13873 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13874 mips32_op
= OPC_CVT_S_D
;
13876 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13877 mips32_op
= OPC_CVT_S_W
;
13879 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13880 mips32_op
= OPC_CVT_S_L
;
13882 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13885 /* Conditional moves on floating-point codes */
13886 case COND_FLOAT_MOV(MOVT
, 0):
13887 case COND_FLOAT_MOV(MOVT
, 1):
13888 case COND_FLOAT_MOV(MOVT
, 2):
13889 case COND_FLOAT_MOV(MOVT
, 3):
13890 case COND_FLOAT_MOV(MOVT
, 4):
13891 case COND_FLOAT_MOV(MOVT
, 5):
13892 case COND_FLOAT_MOV(MOVT
, 6):
13893 case COND_FLOAT_MOV(MOVT
, 7):
13894 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13895 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13897 case COND_FLOAT_MOV(MOVF
, 0):
13898 case COND_FLOAT_MOV(MOVF
, 1):
13899 case COND_FLOAT_MOV(MOVF
, 2):
13900 case COND_FLOAT_MOV(MOVF
, 3):
13901 case COND_FLOAT_MOV(MOVF
, 4):
13902 case COND_FLOAT_MOV(MOVF
, 5):
13903 case COND_FLOAT_MOV(MOVF
, 6):
13904 case COND_FLOAT_MOV(MOVF
, 7):
13905 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13906 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13909 MIPS_INVAL("pool32fxf");
13910 generate_exception_end(ctx
, EXCP_RI
);
13915 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13919 int rt
, rs
, rd
, rr
;
13921 uint32_t op
, minor
, minor2
, mips32_op
;
13922 uint32_t cond
, fmt
, cc
;
13924 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13925 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13927 rt
= (ctx
->opcode
>> 21) & 0x1f;
13928 rs
= (ctx
->opcode
>> 16) & 0x1f;
13929 rd
= (ctx
->opcode
>> 11) & 0x1f;
13930 rr
= (ctx
->opcode
>> 6) & 0x1f;
13931 imm
= (int16_t) ctx
->opcode
;
13933 op
= (ctx
->opcode
>> 26) & 0x3f;
13936 minor
= ctx
->opcode
& 0x3f;
13939 minor
= (ctx
->opcode
>> 6) & 0xf;
13942 mips32_op
= OPC_SLL
;
13945 mips32_op
= OPC_SRA
;
13948 mips32_op
= OPC_SRL
;
13951 mips32_op
= OPC_ROTR
;
13953 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13956 check_insn(ctx
, ISA_MIPS32R6
);
13957 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13960 check_insn(ctx
, ISA_MIPS32R6
);
13961 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13964 check_insn(ctx
, ISA_MIPS32R6
);
13965 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13968 goto pool32a_invalid
;
13972 minor
= (ctx
->opcode
>> 6) & 0xf;
13976 mips32_op
= OPC_ADD
;
13979 mips32_op
= OPC_ADDU
;
13982 mips32_op
= OPC_SUB
;
13985 mips32_op
= OPC_SUBU
;
13988 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13989 mips32_op
= OPC_MUL
;
13991 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13995 mips32_op
= OPC_SLLV
;
13998 mips32_op
= OPC_SRLV
;
14001 mips32_op
= OPC_SRAV
;
14004 mips32_op
= OPC_ROTRV
;
14006 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
14008 /* Logical operations */
14010 mips32_op
= OPC_AND
;
14013 mips32_op
= OPC_OR
;
14016 mips32_op
= OPC_NOR
;
14019 mips32_op
= OPC_XOR
;
14021 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
14023 /* Set less than */
14025 mips32_op
= OPC_SLT
;
14028 mips32_op
= OPC_SLTU
;
14030 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
14033 goto pool32a_invalid
;
14037 minor
= (ctx
->opcode
>> 6) & 0xf;
14039 /* Conditional moves */
14040 case MOVN
: /* MUL */
14041 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14043 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
14046 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
14049 case MOVZ
: /* MUH */
14050 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14052 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
14055 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
14059 check_insn(ctx
, ISA_MIPS32R6
);
14060 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
14063 check_insn(ctx
, ISA_MIPS32R6
);
14064 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
14066 case LWXS
: /* DIV */
14067 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14069 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
14072 gen_ldxs(ctx
, rs
, rt
, rd
);
14076 check_insn(ctx
, ISA_MIPS32R6
);
14077 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
14080 check_insn(ctx
, ISA_MIPS32R6
);
14081 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
14084 check_insn(ctx
, ISA_MIPS32R6
);
14085 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
14088 goto pool32a_invalid
;
14092 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
14095 check_insn(ctx
, ISA_MIPS32R6
);
14096 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
14097 extract32(ctx
->opcode
, 9, 2));
14100 check_insn(ctx
, ISA_MIPS32R6
);
14101 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
14102 extract32(ctx
->opcode
, 9, 2));
14105 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
14108 gen_pool32axf(env
, ctx
, rt
, rs
);
14111 generate_exception_end(ctx
, EXCP_BREAK
);
14114 check_insn(ctx
, ISA_MIPS32R6
);
14115 generate_exception_end(ctx
, EXCP_RI
);
14119 MIPS_INVAL("pool32a");
14120 generate_exception_end(ctx
, EXCP_RI
);
14125 minor
= (ctx
->opcode
>> 12) & 0xf;
14128 check_cp0_enabled(ctx
);
14129 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14130 gen_cache_operation(ctx
, rt
, rs
, imm
);
14135 /* COP2: Not implemented. */
14136 generate_exception_err(ctx
, EXCP_CpU
, 2);
14138 #ifdef TARGET_MIPS64
14141 check_insn(ctx
, ISA_MIPS3
);
14142 check_mips_64(ctx
);
14147 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14149 #ifdef TARGET_MIPS64
14152 check_insn(ctx
, ISA_MIPS3
);
14153 check_mips_64(ctx
);
14158 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14161 MIPS_INVAL("pool32b");
14162 generate_exception_end(ctx
, EXCP_RI
);
14167 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14168 minor
= ctx
->opcode
& 0x3f;
14169 check_cp1_enabled(ctx
);
14172 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14173 mips32_op
= OPC_ALNV_PS
;
14176 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14177 mips32_op
= OPC_MADD_S
;
14180 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14181 mips32_op
= OPC_MADD_D
;
14184 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14185 mips32_op
= OPC_MADD_PS
;
14188 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14189 mips32_op
= OPC_MSUB_S
;
14192 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14193 mips32_op
= OPC_MSUB_D
;
14196 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14197 mips32_op
= OPC_MSUB_PS
;
14200 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14201 mips32_op
= OPC_NMADD_S
;
14204 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14205 mips32_op
= OPC_NMADD_D
;
14208 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14209 mips32_op
= OPC_NMADD_PS
;
14212 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14213 mips32_op
= OPC_NMSUB_S
;
14216 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14217 mips32_op
= OPC_NMSUB_D
;
14220 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14221 mips32_op
= OPC_NMSUB_PS
;
14223 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14225 case CABS_COND_FMT
:
14226 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14227 cond
= (ctx
->opcode
>> 6) & 0xf;
14228 cc
= (ctx
->opcode
>> 13) & 0x7;
14229 fmt
= (ctx
->opcode
>> 10) & 0x3;
14232 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14235 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14238 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14241 goto pool32f_invalid
;
14245 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14246 cond
= (ctx
->opcode
>> 6) & 0xf;
14247 cc
= (ctx
->opcode
>> 13) & 0x7;
14248 fmt
= (ctx
->opcode
>> 10) & 0x3;
14251 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14254 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14257 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14260 goto pool32f_invalid
;
14264 check_insn(ctx
, ISA_MIPS32R6
);
14265 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14268 check_insn(ctx
, ISA_MIPS32R6
);
14269 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14272 gen_pool32fxf(ctx
, rt
, rs
);
14276 switch ((ctx
->opcode
>> 6) & 0x7) {
14278 mips32_op
= OPC_PLL_PS
;
14281 mips32_op
= OPC_PLU_PS
;
14284 mips32_op
= OPC_PUL_PS
;
14287 mips32_op
= OPC_PUU_PS
;
14290 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14291 mips32_op
= OPC_CVT_PS_S
;
14293 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14296 goto pool32f_invalid
;
14300 check_insn(ctx
, ISA_MIPS32R6
);
14301 switch ((ctx
->opcode
>> 9) & 0x3) {
14303 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14306 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14309 goto pool32f_invalid
;
14314 switch ((ctx
->opcode
>> 6) & 0x7) {
14316 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14317 mips32_op
= OPC_LWXC1
;
14320 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14321 mips32_op
= OPC_SWXC1
;
14324 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14325 mips32_op
= OPC_LDXC1
;
14328 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14329 mips32_op
= OPC_SDXC1
;
14332 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14333 mips32_op
= OPC_LUXC1
;
14336 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14337 mips32_op
= OPC_SUXC1
;
14339 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14342 goto pool32f_invalid
;
14346 check_insn(ctx
, ISA_MIPS32R6
);
14347 switch ((ctx
->opcode
>> 9) & 0x3) {
14349 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14352 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14355 goto pool32f_invalid
;
14360 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14361 fmt
= (ctx
->opcode
>> 9) & 0x3;
14362 switch ((ctx
->opcode
>> 6) & 0x7) {
14366 mips32_op
= OPC_RSQRT2_S
;
14369 mips32_op
= OPC_RSQRT2_D
;
14372 mips32_op
= OPC_RSQRT2_PS
;
14375 goto pool32f_invalid
;
14381 mips32_op
= OPC_RECIP2_S
;
14384 mips32_op
= OPC_RECIP2_D
;
14387 mips32_op
= OPC_RECIP2_PS
;
14390 goto pool32f_invalid
;
14394 mips32_op
= OPC_ADDR_PS
;
14397 mips32_op
= OPC_MULR_PS
;
14399 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14402 goto pool32f_invalid
;
14406 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14407 cc
= (ctx
->opcode
>> 13) & 0x7;
14408 fmt
= (ctx
->opcode
>> 9) & 0x3;
14409 switch ((ctx
->opcode
>> 6) & 0x7) {
14410 case MOVF_FMT
: /* RINT_FMT */
14411 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14415 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14418 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14421 goto pool32f_invalid
;
14427 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14430 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14434 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14437 goto pool32f_invalid
;
14441 case MOVT_FMT
: /* CLASS_FMT */
14442 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14446 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14449 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14452 goto pool32f_invalid
;
14458 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14461 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14465 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14468 goto pool32f_invalid
;
14473 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14476 goto pool32f_invalid
;
14479 #define FINSN_3ARG_SDPS(prfx) \
14480 switch ((ctx->opcode >> 8) & 0x3) { \
14482 mips32_op = OPC_##prfx##_S; \
14485 mips32_op = OPC_##prfx##_D; \
14487 case FMT_SDPS_PS: \
14489 mips32_op = OPC_##prfx##_PS; \
14492 goto pool32f_invalid; \
14495 check_insn(ctx
, ISA_MIPS32R6
);
14496 switch ((ctx
->opcode
>> 9) & 0x3) {
14498 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14501 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14504 goto pool32f_invalid
;
14508 check_insn(ctx
, ISA_MIPS32R6
);
14509 switch ((ctx
->opcode
>> 9) & 0x3) {
14511 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14514 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14517 goto pool32f_invalid
;
14521 /* regular FP ops */
14522 switch ((ctx
->opcode
>> 6) & 0x3) {
14524 FINSN_3ARG_SDPS(ADD
);
14527 FINSN_3ARG_SDPS(SUB
);
14530 FINSN_3ARG_SDPS(MUL
);
14533 fmt
= (ctx
->opcode
>> 8) & 0x3;
14535 mips32_op
= OPC_DIV_D
;
14536 } else if (fmt
== 0) {
14537 mips32_op
= OPC_DIV_S
;
14539 goto pool32f_invalid
;
14543 goto pool32f_invalid
;
14548 switch ((ctx
->opcode
>> 6) & 0x7) {
14549 case MOVN_FMT
: /* SELNEZ_FMT */
14550 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14552 switch ((ctx
->opcode
>> 9) & 0x3) {
14554 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14557 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14560 goto pool32f_invalid
;
14564 FINSN_3ARG_SDPS(MOVN
);
14568 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14569 FINSN_3ARG_SDPS(MOVN
);
14571 case MOVZ_FMT
: /* SELEQZ_FMT */
14572 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14574 switch ((ctx
->opcode
>> 9) & 0x3) {
14576 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14579 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14582 goto pool32f_invalid
;
14586 FINSN_3ARG_SDPS(MOVZ
);
14590 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14591 FINSN_3ARG_SDPS(MOVZ
);
14594 check_insn(ctx
, ISA_MIPS32R6
);
14595 switch ((ctx
->opcode
>> 9) & 0x3) {
14597 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14600 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14603 goto pool32f_invalid
;
14607 check_insn(ctx
, ISA_MIPS32R6
);
14608 switch ((ctx
->opcode
>> 9) & 0x3) {
14610 mips32_op
= OPC_MADDF_S
;
14613 mips32_op
= OPC_MADDF_D
;
14616 goto pool32f_invalid
;
14620 check_insn(ctx
, ISA_MIPS32R6
);
14621 switch ((ctx
->opcode
>> 9) & 0x3) {
14623 mips32_op
= OPC_MSUBF_S
;
14626 mips32_op
= OPC_MSUBF_D
;
14629 goto pool32f_invalid
;
14633 goto pool32f_invalid
;
14637 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14641 MIPS_INVAL("pool32f");
14642 generate_exception_end(ctx
, EXCP_RI
);
14646 generate_exception_err(ctx
, EXCP_CpU
, 1);
14650 minor
= (ctx
->opcode
>> 21) & 0x1f;
14653 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14654 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14657 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14658 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14659 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14662 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14663 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14664 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14667 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14668 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14671 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14672 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14673 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14676 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14677 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14678 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14681 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14682 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14685 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14686 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14690 case TLTI
: /* BC1EQZC */
14691 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14693 check_cp1_enabled(ctx
);
14694 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14697 mips32_op
= OPC_TLTI
;
14701 case TGEI
: /* BC1NEZC */
14702 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14704 check_cp1_enabled(ctx
);
14705 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14708 mips32_op
= OPC_TGEI
;
14713 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14714 mips32_op
= OPC_TLTIU
;
14717 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14718 mips32_op
= OPC_TGEIU
;
14720 case TNEI
: /* SYNCI */
14721 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14723 /* Break the TB to be able to sync copied instructions
14725 ctx
->bstate
= BS_STOP
;
14728 mips32_op
= OPC_TNEI
;
14733 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14734 mips32_op
= OPC_TEQI
;
14736 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14741 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14742 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14743 4, rs
, 0, imm
<< 1, 0);
14744 /* Compact branches don't have a delay slot, so just let
14745 the normal delay slot handling take us to the branch
14749 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14750 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14753 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14754 /* Break the TB to be able to sync copied instructions
14756 ctx
->bstate
= BS_STOP
;
14760 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14761 /* COP2: Not implemented. */
14762 generate_exception_err(ctx
, EXCP_CpU
, 2);
14765 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14766 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14769 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14770 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14773 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14774 mips32_op
= OPC_BC1FANY4
;
14777 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14778 mips32_op
= OPC_BC1TANY4
;
14781 check_insn(ctx
, ASE_MIPS3D
);
14784 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14785 check_cp1_enabled(ctx
);
14786 gen_compute_branch1(ctx
, mips32_op
,
14787 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14789 generate_exception_err(ctx
, EXCP_CpU
, 1);
14794 /* MIPS DSP: not implemented */
14797 MIPS_INVAL("pool32i");
14798 generate_exception_end(ctx
, EXCP_RI
);
14803 minor
= (ctx
->opcode
>> 12) & 0xf;
14804 offset
= sextract32(ctx
->opcode
, 0,
14805 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14808 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14809 mips32_op
= OPC_LWL
;
14812 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14813 mips32_op
= OPC_SWL
;
14816 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14817 mips32_op
= OPC_LWR
;
14820 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14821 mips32_op
= OPC_SWR
;
14823 #if defined(TARGET_MIPS64)
14825 check_insn(ctx
, ISA_MIPS3
);
14826 check_mips_64(ctx
);
14827 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14828 mips32_op
= OPC_LDL
;
14831 check_insn(ctx
, ISA_MIPS3
);
14832 check_mips_64(ctx
);
14833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14834 mips32_op
= OPC_SDL
;
14837 check_insn(ctx
, ISA_MIPS3
);
14838 check_mips_64(ctx
);
14839 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14840 mips32_op
= OPC_LDR
;
14843 check_insn(ctx
, ISA_MIPS3
);
14844 check_mips_64(ctx
);
14845 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14846 mips32_op
= OPC_SDR
;
14849 check_insn(ctx
, ISA_MIPS3
);
14850 check_mips_64(ctx
);
14851 mips32_op
= OPC_LWU
;
14854 check_insn(ctx
, ISA_MIPS3
);
14855 check_mips_64(ctx
);
14856 mips32_op
= OPC_LLD
;
14860 mips32_op
= OPC_LL
;
14863 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14866 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
14869 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14871 #if defined(TARGET_MIPS64)
14873 check_insn(ctx
, ISA_MIPS3
);
14874 check_mips_64(ctx
);
14875 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14880 MIPS_INVAL("pool32c ld-eva");
14881 generate_exception_end(ctx
, EXCP_RI
);
14884 check_cp0_enabled(ctx
);
14886 minor2
= (ctx
->opcode
>> 9) & 0x7;
14887 offset
= sextract32(ctx
->opcode
, 0, 9);
14890 mips32_op
= OPC_LBUE
;
14893 mips32_op
= OPC_LHUE
;
14896 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14897 mips32_op
= OPC_LWLE
;
14900 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14901 mips32_op
= OPC_LWRE
;
14904 mips32_op
= OPC_LBE
;
14907 mips32_op
= OPC_LHE
;
14910 mips32_op
= OPC_LLE
;
14913 mips32_op
= OPC_LWE
;
14919 MIPS_INVAL("pool32c st-eva");
14920 generate_exception_end(ctx
, EXCP_RI
);
14923 check_cp0_enabled(ctx
);
14925 minor2
= (ctx
->opcode
>> 9) & 0x7;
14926 offset
= sextract32(ctx
->opcode
, 0, 9);
14929 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14930 mips32_op
= OPC_SWLE
;
14933 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14934 mips32_op
= OPC_SWRE
;
14937 /* Treat as no-op */
14938 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14939 /* hint codes 24-31 are reserved and signal RI */
14940 generate_exception(ctx
, EXCP_RI
);
14944 /* Treat as no-op */
14945 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14946 gen_cache_operation(ctx
, rt
, rs
, offset
);
14950 mips32_op
= OPC_SBE
;
14953 mips32_op
= OPC_SHE
;
14956 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
14959 mips32_op
= OPC_SWE
;
14964 /* Treat as no-op */
14965 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14966 /* hint codes 24-31 are reserved and signal RI */
14967 generate_exception(ctx
, EXCP_RI
);
14971 MIPS_INVAL("pool32c");
14972 generate_exception_end(ctx
, EXCP_RI
);
14976 case ADDI32
: /* AUI, LUI */
14977 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14979 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14982 mips32_op
= OPC_ADDI
;
14987 mips32_op
= OPC_ADDIU
;
14989 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14992 /* Logical operations */
14994 mips32_op
= OPC_ORI
;
14997 mips32_op
= OPC_XORI
;
15000 mips32_op
= OPC_ANDI
;
15002 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15005 /* Set less than immediate */
15007 mips32_op
= OPC_SLTI
;
15010 mips32_op
= OPC_SLTIU
;
15012 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
15015 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15016 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15017 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
15018 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15020 case JALS32
: /* BOVC, BEQC, BEQZALC */
15021 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15024 mips32_op
= OPC_BOVC
;
15025 } else if (rs
< rt
&& rs
== 0) {
15027 mips32_op
= OPC_BEQZALC
;
15030 mips32_op
= OPC_BEQC
;
15032 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15035 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
15036 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
15037 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15040 case BEQ32
: /* BC */
15041 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15043 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
15044 sextract32(ctx
->opcode
<< 1, 0, 27));
15047 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
15050 case BNE32
: /* BALC */
15051 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15053 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
15054 sextract32(ctx
->opcode
<< 1, 0, 27));
15057 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
15060 case J32
: /* BGTZC, BLTZC, BLTC */
15061 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15062 if (rs
== 0 && rt
!= 0) {
15064 mips32_op
= OPC_BGTZC
;
15065 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15067 mips32_op
= OPC_BLTZC
;
15070 mips32_op
= OPC_BLTC
;
15072 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15075 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
15076 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15079 case JAL32
: /* BLEZC, BGEZC, BGEC */
15080 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15081 if (rs
== 0 && rt
!= 0) {
15083 mips32_op
= OPC_BLEZC
;
15084 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15086 mips32_op
= OPC_BGEZC
;
15089 mips32_op
= OPC_BGEC
;
15091 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15094 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
15095 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
15096 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15099 /* Floating point (COP1) */
15101 mips32_op
= OPC_LWC1
;
15104 mips32_op
= OPC_LDC1
;
15107 mips32_op
= OPC_SWC1
;
15110 mips32_op
= OPC_SDC1
;
15112 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
15114 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15115 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15116 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15117 switch ((ctx
->opcode
>> 16) & 0x1f) {
15118 case ADDIUPC_00
... ADDIUPC_07
:
15119 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
15122 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
15125 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
15127 case LWPC_08
... LWPC_0F
:
15128 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
15131 generate_exception(ctx
, EXCP_RI
);
15136 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
15137 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
15139 gen_addiupc(ctx
, reg
, offset
, 0, 0);
15142 case BNVC
: /* BNEC, BNEZALC */
15143 check_insn(ctx
, ISA_MIPS32R6
);
15146 mips32_op
= OPC_BNVC
;
15147 } else if (rs
< rt
&& rs
== 0) {
15149 mips32_op
= OPC_BNEZALC
;
15152 mips32_op
= OPC_BNEC
;
15154 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15156 case R6_BNEZC
: /* JIALC */
15157 check_insn(ctx
, ISA_MIPS32R6
);
15160 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
15161 sextract32(ctx
->opcode
<< 1, 0, 22));
15164 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
15167 case R6_BEQZC
: /* JIC */
15168 check_insn(ctx
, ISA_MIPS32R6
);
15171 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
15172 sextract32(ctx
->opcode
<< 1, 0, 22));
15175 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
15178 case BLEZALC
: /* BGEZALC, BGEUC */
15179 check_insn(ctx
, ISA_MIPS32R6
);
15180 if (rs
== 0 && rt
!= 0) {
15182 mips32_op
= OPC_BLEZALC
;
15183 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15185 mips32_op
= OPC_BGEZALC
;
15188 mips32_op
= OPC_BGEUC
;
15190 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15192 case BGTZALC
: /* BLTZALC, BLTUC */
15193 check_insn(ctx
, ISA_MIPS32R6
);
15194 if (rs
== 0 && rt
!= 0) {
15196 mips32_op
= OPC_BGTZALC
;
15197 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
15199 mips32_op
= OPC_BLTZALC
;
15202 mips32_op
= OPC_BLTUC
;
15204 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
15206 /* Loads and stores */
15208 mips32_op
= OPC_LB
;
15211 mips32_op
= OPC_LBU
;
15214 mips32_op
= OPC_LH
;
15217 mips32_op
= OPC_LHU
;
15220 mips32_op
= OPC_LW
;
15222 #ifdef TARGET_MIPS64
15224 check_insn(ctx
, ISA_MIPS3
);
15225 check_mips_64(ctx
);
15226 mips32_op
= OPC_LD
;
15229 check_insn(ctx
, ISA_MIPS3
);
15230 check_mips_64(ctx
);
15231 mips32_op
= OPC_SD
;
15235 mips32_op
= OPC_SB
;
15238 mips32_op
= OPC_SH
;
15241 mips32_op
= OPC_SW
;
15244 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15247 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15250 generate_exception_end(ctx
, EXCP_RI
);
15255 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
15259 /* make sure instructions are on a halfword boundary */
15260 if (ctx
->pc
& 0x1) {
15261 env
->CP0_BadVAddr
= ctx
->pc
;
15262 generate_exception_end(ctx
, EXCP_AdEL
);
15266 op
= (ctx
->opcode
>> 10) & 0x3f;
15267 /* Enforce properly-sized instructions in a delay slot */
15268 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
15269 switch (op
& 0x7) { /* MSB-3..MSB-5 */
15271 /* POOL32A, POOL32B, POOL32I, POOL32C */
15273 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15275 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15277 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15279 /* LB32, LH32, LWC132, LDC132, LW32 */
15280 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
15281 generate_exception_end(ctx
, EXCP_RI
);
15286 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15288 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15290 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15291 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
15292 generate_exception_end(ctx
, EXCP_RI
);
15302 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15303 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15304 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15307 switch (ctx
->opcode
& 0x1) {
15315 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15316 /* In the Release 6 the register number location in
15317 * the instruction encoding has changed.
15319 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15321 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15327 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15328 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15329 int amount
= (ctx
->opcode
>> 1) & 0x7;
15331 amount
= amount
== 0 ? 8 : amount
;
15333 switch (ctx
->opcode
& 0x1) {
15342 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15346 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15347 gen_pool16c_r6_insn(ctx
);
15349 gen_pool16c_insn(ctx
);
15354 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15355 int rb
= 28; /* GP */
15356 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15358 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15362 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15363 if (ctx
->opcode
& 1) {
15364 generate_exception_end(ctx
, EXCP_RI
);
15367 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15368 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15369 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15370 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15375 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15376 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15377 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15378 offset
= (offset
== 0xf ? -1 : offset
);
15380 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15385 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15386 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15387 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15389 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15394 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15395 int rb
= 29; /* SP */
15396 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15398 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15403 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15404 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15405 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15407 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15412 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15413 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15414 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15416 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15421 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15422 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15423 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15425 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15430 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15431 int rb
= 29; /* SP */
15432 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15434 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15439 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15440 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15441 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15443 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15448 int rd
= uMIPS_RD5(ctx
->opcode
);
15449 int rs
= uMIPS_RS5(ctx
->opcode
);
15451 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15458 switch (ctx
->opcode
& 0x1) {
15468 switch (ctx
->opcode
& 0x1) {
15473 gen_addiur1sp(ctx
);
15477 case B16
: /* BC16 */
15478 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15479 sextract32(ctx
->opcode
, 0, 10) << 1,
15480 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15482 case BNEZ16
: /* BNEZC16 */
15483 case BEQZ16
: /* BEQZC16 */
15484 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15485 mmreg(uMIPS_RD(ctx
->opcode
)),
15486 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15487 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15492 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15493 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15495 imm
= (imm
== 0x7f ? -1 : imm
);
15496 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15502 generate_exception_end(ctx
, EXCP_RI
);
15505 decode_micromips32_opc(env
, ctx
);
15512 /* SmartMIPS extension to MIPS32 */
15514 #if defined(TARGET_MIPS64)
15516 /* MDMX extension to MIPS64 */
15520 /* MIPSDSP functions. */
15521 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
15522 int rd
, int base
, int offset
)
15527 t0
= tcg_temp_new();
15530 gen_load_gpr(t0
, offset
);
15531 } else if (offset
== 0) {
15532 gen_load_gpr(t0
, base
);
15534 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
15539 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15540 gen_store_gpr(t0
, rd
);
15543 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15544 gen_store_gpr(t0
, rd
);
15547 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15548 gen_store_gpr(t0
, rd
);
15550 #if defined(TARGET_MIPS64)
15552 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15553 gen_store_gpr(t0
, rd
);
15560 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15561 int ret
, int v1
, int v2
)
15567 /* Treat as NOP. */
15571 v1_t
= tcg_temp_new();
15572 v2_t
= tcg_temp_new();
15574 gen_load_gpr(v1_t
, v1
);
15575 gen_load_gpr(v2_t
, v2
);
15578 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15579 case OPC_MULT_G_2E
:
15583 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15585 case OPC_ADDUH_R_QB
:
15586 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15589 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15591 case OPC_ADDQH_R_PH
:
15592 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15595 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15597 case OPC_ADDQH_R_W
:
15598 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15601 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15603 case OPC_SUBUH_R_QB
:
15604 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15607 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15609 case OPC_SUBQH_R_PH
:
15610 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15613 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15615 case OPC_SUBQH_R_W
:
15616 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15620 case OPC_ABSQ_S_PH_DSP
:
15622 case OPC_ABSQ_S_QB
:
15624 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15626 case OPC_ABSQ_S_PH
:
15628 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15632 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15634 case OPC_PRECEQ_W_PHL
:
15636 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15637 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15639 case OPC_PRECEQ_W_PHR
:
15641 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15642 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15643 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15645 case OPC_PRECEQU_PH_QBL
:
15647 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15649 case OPC_PRECEQU_PH_QBR
:
15651 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15653 case OPC_PRECEQU_PH_QBLA
:
15655 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15657 case OPC_PRECEQU_PH_QBRA
:
15659 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15661 case OPC_PRECEU_PH_QBL
:
15663 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15665 case OPC_PRECEU_PH_QBR
:
15667 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15669 case OPC_PRECEU_PH_QBLA
:
15671 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15673 case OPC_PRECEU_PH_QBRA
:
15675 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15679 case OPC_ADDU_QB_DSP
:
15683 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15685 case OPC_ADDQ_S_PH
:
15687 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15691 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15695 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15697 case OPC_ADDU_S_QB
:
15699 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15703 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15705 case OPC_ADDU_S_PH
:
15707 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15711 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15713 case OPC_SUBQ_S_PH
:
15715 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15719 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15723 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15725 case OPC_SUBU_S_QB
:
15727 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15731 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15733 case OPC_SUBU_S_PH
:
15735 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15739 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15743 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15747 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15749 case OPC_RADDU_W_QB
:
15751 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15755 case OPC_CMPU_EQ_QB_DSP
:
15757 case OPC_PRECR_QB_PH
:
15759 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15761 case OPC_PRECRQ_QB_PH
:
15763 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15765 case OPC_PRECR_SRA_PH_W
:
15768 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15769 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15771 tcg_temp_free_i32(sa_t
);
15774 case OPC_PRECR_SRA_R_PH_W
:
15777 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15778 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15780 tcg_temp_free_i32(sa_t
);
15783 case OPC_PRECRQ_PH_W
:
15785 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15787 case OPC_PRECRQ_RS_PH_W
:
15789 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15791 case OPC_PRECRQU_S_QB_PH
:
15793 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15797 #ifdef TARGET_MIPS64
15798 case OPC_ABSQ_S_QH_DSP
:
15800 case OPC_PRECEQ_L_PWL
:
15802 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15804 case OPC_PRECEQ_L_PWR
:
15806 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15808 case OPC_PRECEQ_PW_QHL
:
15810 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15812 case OPC_PRECEQ_PW_QHR
:
15814 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15816 case OPC_PRECEQ_PW_QHLA
:
15818 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15820 case OPC_PRECEQ_PW_QHRA
:
15822 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15824 case OPC_PRECEQU_QH_OBL
:
15826 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15828 case OPC_PRECEQU_QH_OBR
:
15830 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15832 case OPC_PRECEQU_QH_OBLA
:
15834 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15836 case OPC_PRECEQU_QH_OBRA
:
15838 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15840 case OPC_PRECEU_QH_OBL
:
15842 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15844 case OPC_PRECEU_QH_OBR
:
15846 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15848 case OPC_PRECEU_QH_OBLA
:
15850 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15852 case OPC_PRECEU_QH_OBRA
:
15854 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15856 case OPC_ABSQ_S_OB
:
15858 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15860 case OPC_ABSQ_S_PW
:
15862 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15864 case OPC_ABSQ_S_QH
:
15866 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15870 case OPC_ADDU_OB_DSP
:
15872 case OPC_RADDU_L_OB
:
15874 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15878 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15880 case OPC_SUBQ_S_PW
:
15882 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15886 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15888 case OPC_SUBQ_S_QH
:
15890 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15894 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15896 case OPC_SUBU_S_OB
:
15898 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15902 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15904 case OPC_SUBU_S_QH
:
15906 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15910 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15912 case OPC_SUBUH_R_OB
:
15914 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15918 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15920 case OPC_ADDQ_S_PW
:
15922 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15926 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15928 case OPC_ADDQ_S_QH
:
15930 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15934 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15936 case OPC_ADDU_S_OB
:
15938 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15942 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15944 case OPC_ADDU_S_QH
:
15946 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15950 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15952 case OPC_ADDUH_R_OB
:
15954 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15958 case OPC_CMPU_EQ_OB_DSP
:
15960 case OPC_PRECR_OB_QH
:
15962 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15964 case OPC_PRECR_SRA_QH_PW
:
15967 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15968 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15969 tcg_temp_free_i32(ret_t
);
15972 case OPC_PRECR_SRA_R_QH_PW
:
15975 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15976 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15977 tcg_temp_free_i32(sa_v
);
15980 case OPC_PRECRQ_OB_QH
:
15982 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15984 case OPC_PRECRQ_PW_L
:
15986 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15988 case OPC_PRECRQ_QH_PW
:
15990 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15992 case OPC_PRECRQ_RS_QH_PW
:
15994 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15996 case OPC_PRECRQU_S_OB_QH
:
15998 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16005 tcg_temp_free(v1_t
);
16006 tcg_temp_free(v2_t
);
16009 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
16010 int ret
, int v1
, int v2
)
16018 /* Treat as NOP. */
16022 t0
= tcg_temp_new();
16023 v1_t
= tcg_temp_new();
16024 v2_t
= tcg_temp_new();
16026 tcg_gen_movi_tl(t0
, v1
);
16027 gen_load_gpr(v1_t
, v1
);
16028 gen_load_gpr(v2_t
, v2
);
16031 case OPC_SHLL_QB_DSP
:
16033 op2
= MASK_SHLL_QB(ctx
->opcode
);
16037 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16041 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16045 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16049 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16051 case OPC_SHLL_S_PH
:
16053 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16055 case OPC_SHLLV_S_PH
:
16057 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16061 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
16063 case OPC_SHLLV_S_W
:
16065 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16069 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
16073 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16077 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
16081 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16085 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
16087 case OPC_SHRA_R_QB
:
16089 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
16093 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16095 case OPC_SHRAV_R_QB
:
16097 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16101 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
16103 case OPC_SHRA_R_PH
:
16105 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
16109 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16111 case OPC_SHRAV_R_PH
:
16113 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16117 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
16119 case OPC_SHRAV_R_W
:
16121 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
16123 default: /* Invalid */
16124 MIPS_INVAL("MASK SHLL.QB");
16125 generate_exception_end(ctx
, EXCP_RI
);
16130 #ifdef TARGET_MIPS64
16131 case OPC_SHLL_OB_DSP
:
16132 op2
= MASK_SHLL_OB(ctx
->opcode
);
16136 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16140 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16142 case OPC_SHLL_S_PW
:
16144 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16146 case OPC_SHLLV_S_PW
:
16148 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16152 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16156 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16160 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16164 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16166 case OPC_SHLL_S_QH
:
16168 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
16170 case OPC_SHLLV_S_QH
:
16172 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
16176 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
16180 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16182 case OPC_SHRA_R_OB
:
16184 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
16186 case OPC_SHRAV_R_OB
:
16188 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16192 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
16196 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16198 case OPC_SHRA_R_PW
:
16200 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
16202 case OPC_SHRAV_R_PW
:
16204 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
16208 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
16212 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16214 case OPC_SHRA_R_QH
:
16216 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
16218 case OPC_SHRAV_R_QH
:
16220 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16224 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
16228 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
16232 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
16236 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
16238 default: /* Invalid */
16239 MIPS_INVAL("MASK SHLL.OB");
16240 generate_exception_end(ctx
, EXCP_RI
);
16248 tcg_temp_free(v1_t
);
16249 tcg_temp_free(v2_t
);
16252 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16253 int ret
, int v1
, int v2
, int check_ret
)
16259 if ((ret
== 0) && (check_ret
== 1)) {
16260 /* Treat as NOP. */
16264 t0
= tcg_temp_new_i32();
16265 v1_t
= tcg_temp_new();
16266 v2_t
= tcg_temp_new();
16268 tcg_gen_movi_i32(t0
, ret
);
16269 gen_load_gpr(v1_t
, v1
);
16270 gen_load_gpr(v2_t
, v2
);
16273 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
16274 * the same mask and op1. */
16275 case OPC_MULT_G_2E
:
16279 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16282 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16285 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16287 case OPC_MULQ_RS_W
:
16288 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16292 case OPC_DPA_W_PH_DSP
:
16294 case OPC_DPAU_H_QBL
:
16296 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16298 case OPC_DPAU_H_QBR
:
16300 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16302 case OPC_DPSU_H_QBL
:
16304 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16306 case OPC_DPSU_H_QBR
:
16308 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16312 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16314 case OPC_DPAX_W_PH
:
16316 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16318 case OPC_DPAQ_S_W_PH
:
16320 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16322 case OPC_DPAQX_S_W_PH
:
16324 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16326 case OPC_DPAQX_SA_W_PH
:
16328 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16332 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16334 case OPC_DPSX_W_PH
:
16336 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16338 case OPC_DPSQ_S_W_PH
:
16340 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16342 case OPC_DPSQX_S_W_PH
:
16344 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16346 case OPC_DPSQX_SA_W_PH
:
16348 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16350 case OPC_MULSAQ_S_W_PH
:
16352 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16354 case OPC_DPAQ_SA_L_W
:
16356 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16358 case OPC_DPSQ_SA_L_W
:
16360 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16362 case OPC_MAQ_S_W_PHL
:
16364 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16366 case OPC_MAQ_S_W_PHR
:
16368 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16370 case OPC_MAQ_SA_W_PHL
:
16372 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16374 case OPC_MAQ_SA_W_PHR
:
16376 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16378 case OPC_MULSA_W_PH
:
16380 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16384 #ifdef TARGET_MIPS64
16385 case OPC_DPAQ_W_QH_DSP
:
16387 int ac
= ret
& 0x03;
16388 tcg_gen_movi_i32(t0
, ac
);
16393 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16397 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16401 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16405 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16409 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16411 case OPC_DPAQ_S_W_QH
:
16413 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16415 case OPC_DPAQ_SA_L_PW
:
16417 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16419 case OPC_DPAU_H_OBL
:
16421 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16423 case OPC_DPAU_H_OBR
:
16425 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16429 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16431 case OPC_DPSQ_S_W_QH
:
16433 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16435 case OPC_DPSQ_SA_L_PW
:
16437 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16439 case OPC_DPSU_H_OBL
:
16441 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16443 case OPC_DPSU_H_OBR
:
16445 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16447 case OPC_MAQ_S_L_PWL
:
16449 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16451 case OPC_MAQ_S_L_PWR
:
16453 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16455 case OPC_MAQ_S_W_QHLL
:
16457 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16459 case OPC_MAQ_SA_W_QHLL
:
16461 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16463 case OPC_MAQ_S_W_QHLR
:
16465 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16467 case OPC_MAQ_SA_W_QHLR
:
16469 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16471 case OPC_MAQ_S_W_QHRL
:
16473 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16475 case OPC_MAQ_SA_W_QHRL
:
16477 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16479 case OPC_MAQ_S_W_QHRR
:
16481 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16483 case OPC_MAQ_SA_W_QHRR
:
16485 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16487 case OPC_MULSAQ_S_L_PW
:
16489 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16491 case OPC_MULSAQ_S_W_QH
:
16493 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16499 case OPC_ADDU_QB_DSP
:
16501 case OPC_MULEU_S_PH_QBL
:
16503 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16505 case OPC_MULEU_S_PH_QBR
:
16507 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16509 case OPC_MULQ_RS_PH
:
16511 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16513 case OPC_MULEQ_S_W_PHL
:
16515 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16517 case OPC_MULEQ_S_W_PHR
:
16519 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16521 case OPC_MULQ_S_PH
:
16523 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16527 #ifdef TARGET_MIPS64
16528 case OPC_ADDU_OB_DSP
:
16530 case OPC_MULEQ_S_PW_QHL
:
16532 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16534 case OPC_MULEQ_S_PW_QHR
:
16536 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16538 case OPC_MULEU_S_QH_OBL
:
16540 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16542 case OPC_MULEU_S_QH_OBR
:
16544 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16546 case OPC_MULQ_RS_QH
:
16548 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16555 tcg_temp_free_i32(t0
);
16556 tcg_temp_free(v1_t
);
16557 tcg_temp_free(v2_t
);
16560 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16568 /* Treat as NOP. */
16572 t0
= tcg_temp_new();
16573 val_t
= tcg_temp_new();
16574 gen_load_gpr(val_t
, val
);
16577 case OPC_ABSQ_S_PH_DSP
:
16581 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16586 target_long result
;
16587 imm
= (ctx
->opcode
>> 16) & 0xFF;
16588 result
= (uint32_t)imm
<< 24 |
16589 (uint32_t)imm
<< 16 |
16590 (uint32_t)imm
<< 8 |
16592 result
= (int32_t)result
;
16593 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16598 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16599 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16600 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16601 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16602 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16603 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16608 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16609 imm
= (int16_t)(imm
<< 6) >> 6;
16610 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16611 (target_long
)((int32_t)imm
<< 16 | \
16617 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16618 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16619 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16620 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16624 #ifdef TARGET_MIPS64
16625 case OPC_ABSQ_S_QH_DSP
:
16632 imm
= (ctx
->opcode
>> 16) & 0xFF;
16633 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16634 temp
= (temp
<< 16) | temp
;
16635 temp
= (temp
<< 32) | temp
;
16636 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16644 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16645 imm
= (int16_t)(imm
<< 6) >> 6;
16646 temp
= ((target_long
)imm
<< 32) \
16647 | ((target_long
)imm
& 0xFFFFFFFF);
16648 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16656 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16657 imm
= (int16_t)(imm
<< 6) >> 6;
16659 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16660 ((uint64_t)(uint16_t)imm
<< 32) |
16661 ((uint64_t)(uint16_t)imm
<< 16) |
16662 (uint64_t)(uint16_t)imm
;
16663 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16668 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16669 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16670 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16671 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16672 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16673 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16674 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16678 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16679 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16680 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16684 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16685 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16686 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16687 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16688 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16695 tcg_temp_free(val_t
);
16698 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16699 uint32_t op1
, uint32_t op2
,
16700 int ret
, int v1
, int v2
, int check_ret
)
16706 if ((ret
== 0) && (check_ret
== 1)) {
16707 /* Treat as NOP. */
16711 t1
= tcg_temp_new();
16712 v1_t
= tcg_temp_new();
16713 v2_t
= tcg_temp_new();
16715 gen_load_gpr(v1_t
, v1
);
16716 gen_load_gpr(v2_t
, v2
);
16719 case OPC_CMPU_EQ_QB_DSP
:
16721 case OPC_CMPU_EQ_QB
:
16723 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16725 case OPC_CMPU_LT_QB
:
16727 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16729 case OPC_CMPU_LE_QB
:
16731 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16733 case OPC_CMPGU_EQ_QB
:
16735 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16737 case OPC_CMPGU_LT_QB
:
16739 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16741 case OPC_CMPGU_LE_QB
:
16743 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16745 case OPC_CMPGDU_EQ_QB
:
16747 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16748 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16749 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16750 tcg_gen_shli_tl(t1
, t1
, 24);
16751 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16753 case OPC_CMPGDU_LT_QB
:
16755 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16756 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16757 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16758 tcg_gen_shli_tl(t1
, t1
, 24);
16759 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16761 case OPC_CMPGDU_LE_QB
:
16763 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16764 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16765 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16766 tcg_gen_shli_tl(t1
, t1
, 24);
16767 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16769 case OPC_CMP_EQ_PH
:
16771 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16773 case OPC_CMP_LT_PH
:
16775 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16777 case OPC_CMP_LE_PH
:
16779 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16783 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16787 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16789 case OPC_PACKRL_PH
:
16791 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16795 #ifdef TARGET_MIPS64
16796 case OPC_CMPU_EQ_OB_DSP
:
16798 case OPC_CMP_EQ_PW
:
16800 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16802 case OPC_CMP_LT_PW
:
16804 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16806 case OPC_CMP_LE_PW
:
16808 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16810 case OPC_CMP_EQ_QH
:
16812 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16814 case OPC_CMP_LT_QH
:
16816 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16818 case OPC_CMP_LE_QH
:
16820 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16822 case OPC_CMPGDU_EQ_OB
:
16824 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16826 case OPC_CMPGDU_LT_OB
:
16828 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16830 case OPC_CMPGDU_LE_OB
:
16832 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16834 case OPC_CMPGU_EQ_OB
:
16836 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16838 case OPC_CMPGU_LT_OB
:
16840 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16842 case OPC_CMPGU_LE_OB
:
16844 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16846 case OPC_CMPU_EQ_OB
:
16848 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16850 case OPC_CMPU_LT_OB
:
16852 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16854 case OPC_CMPU_LE_OB
:
16856 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16858 case OPC_PACKRL_PW
:
16860 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16864 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16868 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16872 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16880 tcg_temp_free(v1_t
);
16881 tcg_temp_free(v2_t
);
16884 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16885 uint32_t op1
, int rt
, int rs
, int sa
)
16892 /* Treat as NOP. */
16896 t0
= tcg_temp_new();
16897 gen_load_gpr(t0
, rs
);
16900 case OPC_APPEND_DSP
:
16901 switch (MASK_APPEND(ctx
->opcode
)) {
16904 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16906 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16910 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16911 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16912 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16913 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16915 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16919 if (sa
!= 0 && sa
!= 2) {
16920 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16921 tcg_gen_ext32u_tl(t0
, t0
);
16922 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16923 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16925 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16927 default: /* Invalid */
16928 MIPS_INVAL("MASK APPEND");
16929 generate_exception_end(ctx
, EXCP_RI
);
16933 #ifdef TARGET_MIPS64
16934 case OPC_DAPPEND_DSP
:
16935 switch (MASK_DAPPEND(ctx
->opcode
)) {
16938 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16942 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16943 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16944 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16948 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16949 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16950 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16955 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16956 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16957 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16958 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16961 default: /* Invalid */
16962 MIPS_INVAL("MASK DAPPEND");
16963 generate_exception_end(ctx
, EXCP_RI
);
16972 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16973 int ret
, int v1
, int v2
, int check_ret
)
16982 if ((ret
== 0) && (check_ret
== 1)) {
16983 /* Treat as NOP. */
16987 t0
= tcg_temp_new();
16988 t1
= tcg_temp_new();
16989 v1_t
= tcg_temp_new();
16990 v2_t
= tcg_temp_new();
16992 gen_load_gpr(v1_t
, v1
);
16993 gen_load_gpr(v2_t
, v2
);
16996 case OPC_EXTR_W_DSP
:
17000 tcg_gen_movi_tl(t0
, v2
);
17001 tcg_gen_movi_tl(t1
, v1
);
17002 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17005 tcg_gen_movi_tl(t0
, v2
);
17006 tcg_gen_movi_tl(t1
, v1
);
17007 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17009 case OPC_EXTR_RS_W
:
17010 tcg_gen_movi_tl(t0
, v2
);
17011 tcg_gen_movi_tl(t1
, v1
);
17012 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17015 tcg_gen_movi_tl(t0
, v2
);
17016 tcg_gen_movi_tl(t1
, v1
);
17017 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17019 case OPC_EXTRV_S_H
:
17020 tcg_gen_movi_tl(t0
, v2
);
17021 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17024 tcg_gen_movi_tl(t0
, v2
);
17025 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17027 case OPC_EXTRV_R_W
:
17028 tcg_gen_movi_tl(t0
, v2
);
17029 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17031 case OPC_EXTRV_RS_W
:
17032 tcg_gen_movi_tl(t0
, v2
);
17033 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17036 tcg_gen_movi_tl(t0
, v2
);
17037 tcg_gen_movi_tl(t1
, v1
);
17038 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17041 tcg_gen_movi_tl(t0
, v2
);
17042 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17045 tcg_gen_movi_tl(t0
, v2
);
17046 tcg_gen_movi_tl(t1
, v1
);
17047 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17050 tcg_gen_movi_tl(t0
, v2
);
17051 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17054 imm
= (ctx
->opcode
>> 20) & 0x3F;
17055 tcg_gen_movi_tl(t0
, ret
);
17056 tcg_gen_movi_tl(t1
, imm
);
17057 gen_helper_shilo(t0
, t1
, cpu_env
);
17060 tcg_gen_movi_tl(t0
, ret
);
17061 gen_helper_shilo(t0
, v1_t
, cpu_env
);
17064 tcg_gen_movi_tl(t0
, ret
);
17065 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
17068 imm
= (ctx
->opcode
>> 11) & 0x3FF;
17069 tcg_gen_movi_tl(t0
, imm
);
17070 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
17073 imm
= (ctx
->opcode
>> 16) & 0x03FF;
17074 tcg_gen_movi_tl(t0
, imm
);
17075 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
17079 #ifdef TARGET_MIPS64
17080 case OPC_DEXTR_W_DSP
:
17084 tcg_gen_movi_tl(t0
, ret
);
17085 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
17089 int shift
= (ctx
->opcode
>> 19) & 0x7F;
17090 int ac
= (ctx
->opcode
>> 11) & 0x03;
17091 tcg_gen_movi_tl(t0
, shift
);
17092 tcg_gen_movi_tl(t1
, ac
);
17093 gen_helper_dshilo(t0
, t1
, cpu_env
);
17098 int ac
= (ctx
->opcode
>> 11) & 0x03;
17099 tcg_gen_movi_tl(t0
, ac
);
17100 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
17104 tcg_gen_movi_tl(t0
, v2
);
17105 tcg_gen_movi_tl(t1
, v1
);
17107 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17110 tcg_gen_movi_tl(t0
, v2
);
17111 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17114 tcg_gen_movi_tl(t0
, v2
);
17115 tcg_gen_movi_tl(t1
, v1
);
17116 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17119 tcg_gen_movi_tl(t0
, v2
);
17120 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17123 tcg_gen_movi_tl(t0
, v2
);
17124 tcg_gen_movi_tl(t1
, v1
);
17125 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17127 case OPC_DEXTR_R_L
:
17128 tcg_gen_movi_tl(t0
, v2
);
17129 tcg_gen_movi_tl(t1
, v1
);
17130 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17132 case OPC_DEXTR_RS_L
:
17133 tcg_gen_movi_tl(t0
, v2
);
17134 tcg_gen_movi_tl(t1
, v1
);
17135 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17138 tcg_gen_movi_tl(t0
, v2
);
17139 tcg_gen_movi_tl(t1
, v1
);
17140 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17142 case OPC_DEXTR_R_W
:
17143 tcg_gen_movi_tl(t0
, v2
);
17144 tcg_gen_movi_tl(t1
, v1
);
17145 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17147 case OPC_DEXTR_RS_W
:
17148 tcg_gen_movi_tl(t0
, v2
);
17149 tcg_gen_movi_tl(t1
, v1
);
17150 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17152 case OPC_DEXTR_S_H
:
17153 tcg_gen_movi_tl(t0
, v2
);
17154 tcg_gen_movi_tl(t1
, v1
);
17155 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17157 case OPC_DEXTRV_S_H
:
17158 tcg_gen_movi_tl(t0
, v2
);
17159 tcg_gen_movi_tl(t1
, v1
);
17160 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
17163 tcg_gen_movi_tl(t0
, v2
);
17164 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17166 case OPC_DEXTRV_R_L
:
17167 tcg_gen_movi_tl(t0
, v2
);
17168 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17170 case OPC_DEXTRV_RS_L
:
17171 tcg_gen_movi_tl(t0
, v2
);
17172 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17175 tcg_gen_movi_tl(t0
, v2
);
17176 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17178 case OPC_DEXTRV_R_W
:
17179 tcg_gen_movi_tl(t0
, v2
);
17180 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17182 case OPC_DEXTRV_RS_W
:
17183 tcg_gen_movi_tl(t0
, v2
);
17184 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
17193 tcg_temp_free(v1_t
);
17194 tcg_temp_free(v2_t
);
17197 /* End MIPSDSP functions. */
17199 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17201 int rs
, rt
, rd
, sa
;
17204 rs
= (ctx
->opcode
>> 21) & 0x1f;
17205 rt
= (ctx
->opcode
>> 16) & 0x1f;
17206 rd
= (ctx
->opcode
>> 11) & 0x1f;
17207 sa
= (ctx
->opcode
>> 6) & 0x1f;
17209 op1
= MASK_SPECIAL(ctx
->opcode
);
17212 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17214 case OPC_MULT
... OPC_DIVU
:
17215 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17225 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17228 MIPS_INVAL("special_r6 muldiv");
17229 generate_exception_end(ctx
, EXCP_RI
);
17235 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17239 if (rt
== 0 && sa
== 1) {
17240 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17241 We need additionally to check other fields */
17242 gen_cl(ctx
, op1
, rd
, rs
);
17244 generate_exception_end(ctx
, EXCP_RI
);
17248 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17249 gen_helper_do_semihosting(cpu_env
);
17251 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
17252 generate_exception_end(ctx
, EXCP_RI
);
17254 generate_exception_end(ctx
, EXCP_DBp
);
17258 #if defined(TARGET_MIPS64)
17260 check_mips_64(ctx
);
17261 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17265 if (rt
== 0 && sa
== 1) {
17266 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17267 We need additionally to check other fields */
17268 check_mips_64(ctx
);
17269 gen_cl(ctx
, op1
, rd
, rs
);
17271 generate_exception_end(ctx
, EXCP_RI
);
17274 case OPC_DMULT
... OPC_DDIVU
:
17275 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17285 check_mips_64(ctx
);
17286 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17289 MIPS_INVAL("special_r6 muldiv");
17290 generate_exception_end(ctx
, EXCP_RI
);
17295 default: /* Invalid */
17296 MIPS_INVAL("special_r6");
17297 generate_exception_end(ctx
, EXCP_RI
);
17302 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17304 int rs
, rt
, rd
, sa
;
17307 rs
= (ctx
->opcode
>> 21) & 0x1f;
17308 rt
= (ctx
->opcode
>> 16) & 0x1f;
17309 rd
= (ctx
->opcode
>> 11) & 0x1f;
17310 sa
= (ctx
->opcode
>> 6) & 0x1f;
17312 op1
= MASK_SPECIAL(ctx
->opcode
);
17314 case OPC_MOVN
: /* Conditional move */
17316 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17317 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17318 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17320 case OPC_MFHI
: /* Move from HI/LO */
17322 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17325 case OPC_MTLO
: /* Move to HI/LO */
17326 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17329 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17330 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17331 check_cp1_enabled(ctx
);
17332 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17333 (ctx
->opcode
>> 16) & 1);
17335 generate_exception_err(ctx
, EXCP_CpU
, 1);
17341 check_insn(ctx
, INSN_VR54XX
);
17342 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17343 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17345 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17350 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17352 #if defined(TARGET_MIPS64)
17353 case OPC_DMULT
... OPC_DDIVU
:
17354 check_insn(ctx
, ISA_MIPS3
);
17355 check_mips_64(ctx
);
17356 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17360 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17363 #ifdef MIPS_STRICT_STANDARD
17364 MIPS_INVAL("SPIM");
17365 generate_exception_end(ctx
, EXCP_RI
);
17367 /* Implemented as RI exception for now. */
17368 MIPS_INVAL("spim (unofficial)");
17369 generate_exception_end(ctx
, EXCP_RI
);
17372 default: /* Invalid */
17373 MIPS_INVAL("special_legacy");
17374 generate_exception_end(ctx
, EXCP_RI
);
17379 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17381 int rs
, rt
, rd
, sa
;
17384 rs
= (ctx
->opcode
>> 21) & 0x1f;
17385 rt
= (ctx
->opcode
>> 16) & 0x1f;
17386 rd
= (ctx
->opcode
>> 11) & 0x1f;
17387 sa
= (ctx
->opcode
>> 6) & 0x1f;
17389 op1
= MASK_SPECIAL(ctx
->opcode
);
17391 case OPC_SLL
: /* Shift with immediate */
17392 if (sa
== 5 && rd
== 0 &&
17393 rs
== 0 && rt
== 0) { /* PAUSE */
17394 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17395 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17396 generate_exception_end(ctx
, EXCP_RI
);
17402 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17405 switch ((ctx
->opcode
>> 21) & 0x1f) {
17407 /* rotr is decoded as srl on non-R2 CPUs */
17408 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17413 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17416 generate_exception_end(ctx
, EXCP_RI
);
17420 case OPC_ADD
... OPC_SUBU
:
17421 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17423 case OPC_SLLV
: /* Shifts */
17425 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17428 switch ((ctx
->opcode
>> 6) & 0x1f) {
17430 /* rotrv is decoded as srlv on non-R2 CPUs */
17431 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17436 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17439 generate_exception_end(ctx
, EXCP_RI
);
17443 case OPC_SLT
: /* Set on less than */
17445 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17447 case OPC_AND
: /* Logic*/
17451 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17454 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17456 case OPC_TGE
... OPC_TEQ
: /* Traps */
17458 check_insn(ctx
, ISA_MIPS2
);
17459 gen_trap(ctx
, op1
, rs
, rt
, -1);
17461 case OPC_LSA
: /* OPC_PMON */
17462 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17463 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17464 decode_opc_special_r6(env
, ctx
);
17466 /* Pmon entry point, also R4010 selsl */
17467 #ifdef MIPS_STRICT_STANDARD
17468 MIPS_INVAL("PMON / selsl");
17469 generate_exception_end(ctx
, EXCP_RI
);
17471 gen_helper_0e0i(pmon
, sa
);
17476 generate_exception_end(ctx
, EXCP_SYSCALL
);
17479 generate_exception_end(ctx
, EXCP_BREAK
);
17482 check_insn(ctx
, ISA_MIPS2
);
17483 gen_sync(extract32(ctx
->opcode
, 6, 5));
17486 #if defined(TARGET_MIPS64)
17487 /* MIPS64 specific opcodes */
17492 check_insn(ctx
, ISA_MIPS3
);
17493 check_mips_64(ctx
);
17494 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17497 switch ((ctx
->opcode
>> 21) & 0x1f) {
17499 /* drotr is decoded as dsrl on non-R2 CPUs */
17500 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17505 check_insn(ctx
, ISA_MIPS3
);
17506 check_mips_64(ctx
);
17507 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17510 generate_exception_end(ctx
, EXCP_RI
);
17515 switch ((ctx
->opcode
>> 21) & 0x1f) {
17517 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17518 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17523 check_insn(ctx
, ISA_MIPS3
);
17524 check_mips_64(ctx
);
17525 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17528 generate_exception_end(ctx
, EXCP_RI
);
17532 case OPC_DADD
... OPC_DSUBU
:
17533 check_insn(ctx
, ISA_MIPS3
);
17534 check_mips_64(ctx
);
17535 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17539 check_insn(ctx
, ISA_MIPS3
);
17540 check_mips_64(ctx
);
17541 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17544 switch ((ctx
->opcode
>> 6) & 0x1f) {
17546 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17547 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17552 check_insn(ctx
, ISA_MIPS3
);
17553 check_mips_64(ctx
);
17554 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17557 generate_exception_end(ctx
, EXCP_RI
);
17562 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17563 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17564 decode_opc_special_r6(env
, ctx
);
17569 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17570 decode_opc_special_r6(env
, ctx
);
17572 decode_opc_special_legacy(env
, ctx
);
17577 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17582 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17584 rs
= (ctx
->opcode
>> 21) & 0x1f;
17585 rt
= (ctx
->opcode
>> 16) & 0x1f;
17586 rd
= (ctx
->opcode
>> 11) & 0x1f;
17588 op1
= MASK_SPECIAL2(ctx
->opcode
);
17590 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17591 case OPC_MSUB
... OPC_MSUBU
:
17592 check_insn(ctx
, ISA_MIPS32
);
17593 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17596 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17599 case OPC_DIVU_G_2F
:
17600 case OPC_MULT_G_2F
:
17601 case OPC_MULTU_G_2F
:
17603 case OPC_MODU_G_2F
:
17604 check_insn(ctx
, INSN_LOONGSON2F
);
17605 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17609 check_insn(ctx
, ISA_MIPS32
);
17610 gen_cl(ctx
, op1
, rd
, rs
);
17613 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17614 gen_helper_do_semihosting(cpu_env
);
17616 /* XXX: not clear which exception should be raised
17617 * when in debug mode...
17619 check_insn(ctx
, ISA_MIPS32
);
17620 generate_exception_end(ctx
, EXCP_DBp
);
17623 #if defined(TARGET_MIPS64)
17626 check_insn(ctx
, ISA_MIPS64
);
17627 check_mips_64(ctx
);
17628 gen_cl(ctx
, op1
, rd
, rs
);
17630 case OPC_DMULT_G_2F
:
17631 case OPC_DMULTU_G_2F
:
17632 case OPC_DDIV_G_2F
:
17633 case OPC_DDIVU_G_2F
:
17634 case OPC_DMOD_G_2F
:
17635 case OPC_DMODU_G_2F
:
17636 check_insn(ctx
, INSN_LOONGSON2F
);
17637 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17640 default: /* Invalid */
17641 MIPS_INVAL("special2_legacy");
17642 generate_exception_end(ctx
, EXCP_RI
);
17647 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17649 int rs
, rt
, rd
, sa
;
17653 rs
= (ctx
->opcode
>> 21) & 0x1f;
17654 rt
= (ctx
->opcode
>> 16) & 0x1f;
17655 rd
= (ctx
->opcode
>> 11) & 0x1f;
17656 sa
= (ctx
->opcode
>> 6) & 0x1f;
17657 imm
= (int16_t)ctx
->opcode
>> 7;
17659 op1
= MASK_SPECIAL3(ctx
->opcode
);
17663 /* hint codes 24-31 are reserved and signal RI */
17664 generate_exception_end(ctx
, EXCP_RI
);
17666 /* Treat as NOP. */
17669 check_cp0_enabled(ctx
);
17670 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17671 gen_cache_operation(ctx
, rt
, rs
, imm
);
17675 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17678 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17683 /* Treat as NOP. */
17686 op2
= MASK_BSHFL(ctx
->opcode
);
17688 case OPC_ALIGN
... OPC_ALIGN_END
:
17689 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17692 gen_bitswap(ctx
, op2
, rd
, rt
);
17697 #if defined(TARGET_MIPS64)
17699 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17702 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17705 check_mips_64(ctx
);
17708 /* Treat as NOP. */
17711 op2
= MASK_DBSHFL(ctx
->opcode
);
17713 case OPC_DALIGN
... OPC_DALIGN_END
:
17714 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17717 gen_bitswap(ctx
, op2
, rd
, rt
);
17724 default: /* Invalid */
17725 MIPS_INVAL("special3_r6");
17726 generate_exception_end(ctx
, EXCP_RI
);
17731 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17736 rs
= (ctx
->opcode
>> 21) & 0x1f;
17737 rt
= (ctx
->opcode
>> 16) & 0x1f;
17738 rd
= (ctx
->opcode
>> 11) & 0x1f;
17740 op1
= MASK_SPECIAL3(ctx
->opcode
);
17742 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17743 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17744 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17745 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17746 * the same mask and op1. */
17747 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17748 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17751 case OPC_ADDUH_R_QB
:
17753 case OPC_ADDQH_R_PH
:
17755 case OPC_ADDQH_R_W
:
17757 case OPC_SUBUH_R_QB
:
17759 case OPC_SUBQH_R_PH
:
17761 case OPC_SUBQH_R_W
:
17762 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17767 case OPC_MULQ_RS_W
:
17768 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17771 MIPS_INVAL("MASK ADDUH.QB");
17772 generate_exception_end(ctx
, EXCP_RI
);
17775 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17776 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17778 generate_exception_end(ctx
, EXCP_RI
);
17782 op2
= MASK_LX(ctx
->opcode
);
17784 #if defined(TARGET_MIPS64)
17790 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17792 default: /* Invalid */
17793 MIPS_INVAL("MASK LX");
17794 generate_exception_end(ctx
, EXCP_RI
);
17798 case OPC_ABSQ_S_PH_DSP
:
17799 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17801 case OPC_ABSQ_S_QB
:
17802 case OPC_ABSQ_S_PH
:
17804 case OPC_PRECEQ_W_PHL
:
17805 case OPC_PRECEQ_W_PHR
:
17806 case OPC_PRECEQU_PH_QBL
:
17807 case OPC_PRECEQU_PH_QBR
:
17808 case OPC_PRECEQU_PH_QBLA
:
17809 case OPC_PRECEQU_PH_QBRA
:
17810 case OPC_PRECEU_PH_QBL
:
17811 case OPC_PRECEU_PH_QBR
:
17812 case OPC_PRECEU_PH_QBLA
:
17813 case OPC_PRECEU_PH_QBRA
:
17814 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17821 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17824 MIPS_INVAL("MASK ABSQ_S.PH");
17825 generate_exception_end(ctx
, EXCP_RI
);
17829 case OPC_ADDU_QB_DSP
:
17830 op2
= MASK_ADDU_QB(ctx
->opcode
);
17833 case OPC_ADDQ_S_PH
:
17836 case OPC_ADDU_S_QB
:
17838 case OPC_ADDU_S_PH
:
17840 case OPC_SUBQ_S_PH
:
17843 case OPC_SUBU_S_QB
:
17845 case OPC_SUBU_S_PH
:
17849 case OPC_RADDU_W_QB
:
17850 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17852 case OPC_MULEU_S_PH_QBL
:
17853 case OPC_MULEU_S_PH_QBR
:
17854 case OPC_MULQ_RS_PH
:
17855 case OPC_MULEQ_S_W_PHL
:
17856 case OPC_MULEQ_S_W_PHR
:
17857 case OPC_MULQ_S_PH
:
17858 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17860 default: /* Invalid */
17861 MIPS_INVAL("MASK ADDU.QB");
17862 generate_exception_end(ctx
, EXCP_RI
);
17867 case OPC_CMPU_EQ_QB_DSP
:
17868 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17870 case OPC_PRECR_SRA_PH_W
:
17871 case OPC_PRECR_SRA_R_PH_W
:
17872 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17874 case OPC_PRECR_QB_PH
:
17875 case OPC_PRECRQ_QB_PH
:
17876 case OPC_PRECRQ_PH_W
:
17877 case OPC_PRECRQ_RS_PH_W
:
17878 case OPC_PRECRQU_S_QB_PH
:
17879 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17881 case OPC_CMPU_EQ_QB
:
17882 case OPC_CMPU_LT_QB
:
17883 case OPC_CMPU_LE_QB
:
17884 case OPC_CMP_EQ_PH
:
17885 case OPC_CMP_LT_PH
:
17886 case OPC_CMP_LE_PH
:
17887 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17889 case OPC_CMPGU_EQ_QB
:
17890 case OPC_CMPGU_LT_QB
:
17891 case OPC_CMPGU_LE_QB
:
17892 case OPC_CMPGDU_EQ_QB
:
17893 case OPC_CMPGDU_LT_QB
:
17894 case OPC_CMPGDU_LE_QB
:
17897 case OPC_PACKRL_PH
:
17898 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17900 default: /* Invalid */
17901 MIPS_INVAL("MASK CMPU.EQ.QB");
17902 generate_exception_end(ctx
, EXCP_RI
);
17906 case OPC_SHLL_QB_DSP
:
17907 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17909 case OPC_DPA_W_PH_DSP
:
17910 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17912 case OPC_DPAU_H_QBL
:
17913 case OPC_DPAU_H_QBR
:
17914 case OPC_DPSU_H_QBL
:
17915 case OPC_DPSU_H_QBR
:
17917 case OPC_DPAX_W_PH
:
17918 case OPC_DPAQ_S_W_PH
:
17919 case OPC_DPAQX_S_W_PH
:
17920 case OPC_DPAQX_SA_W_PH
:
17922 case OPC_DPSX_W_PH
:
17923 case OPC_DPSQ_S_W_PH
:
17924 case OPC_DPSQX_S_W_PH
:
17925 case OPC_DPSQX_SA_W_PH
:
17926 case OPC_MULSAQ_S_W_PH
:
17927 case OPC_DPAQ_SA_L_W
:
17928 case OPC_DPSQ_SA_L_W
:
17929 case OPC_MAQ_S_W_PHL
:
17930 case OPC_MAQ_S_W_PHR
:
17931 case OPC_MAQ_SA_W_PHL
:
17932 case OPC_MAQ_SA_W_PHR
:
17933 case OPC_MULSA_W_PH
:
17934 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17936 default: /* Invalid */
17937 MIPS_INVAL("MASK DPAW.PH");
17938 generate_exception_end(ctx
, EXCP_RI
);
17943 op2
= MASK_INSV(ctx
->opcode
);
17954 t0
= tcg_temp_new();
17955 t1
= tcg_temp_new();
17957 gen_load_gpr(t0
, rt
);
17958 gen_load_gpr(t1
, rs
);
17960 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17966 default: /* Invalid */
17967 MIPS_INVAL("MASK INSV");
17968 generate_exception_end(ctx
, EXCP_RI
);
17972 case OPC_APPEND_DSP
:
17973 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17975 case OPC_EXTR_W_DSP
:
17976 op2
= MASK_EXTR_W(ctx
->opcode
);
17980 case OPC_EXTR_RS_W
:
17982 case OPC_EXTRV_S_H
:
17984 case OPC_EXTRV_R_W
:
17985 case OPC_EXTRV_RS_W
:
17990 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17993 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17999 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18001 default: /* Invalid */
18002 MIPS_INVAL("MASK EXTR.W");
18003 generate_exception_end(ctx
, EXCP_RI
);
18007 #if defined(TARGET_MIPS64)
18008 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
18009 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
18010 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
18011 check_insn(ctx
, INSN_LOONGSON2E
);
18012 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
18014 case OPC_ABSQ_S_QH_DSP
:
18015 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
18017 case OPC_PRECEQ_L_PWL
:
18018 case OPC_PRECEQ_L_PWR
:
18019 case OPC_PRECEQ_PW_QHL
:
18020 case OPC_PRECEQ_PW_QHR
:
18021 case OPC_PRECEQ_PW_QHLA
:
18022 case OPC_PRECEQ_PW_QHRA
:
18023 case OPC_PRECEQU_QH_OBL
:
18024 case OPC_PRECEQU_QH_OBR
:
18025 case OPC_PRECEQU_QH_OBLA
:
18026 case OPC_PRECEQU_QH_OBRA
:
18027 case OPC_PRECEU_QH_OBL
:
18028 case OPC_PRECEU_QH_OBR
:
18029 case OPC_PRECEU_QH_OBLA
:
18030 case OPC_PRECEU_QH_OBRA
:
18031 case OPC_ABSQ_S_OB
:
18032 case OPC_ABSQ_S_PW
:
18033 case OPC_ABSQ_S_QH
:
18034 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18042 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
18044 default: /* Invalid */
18045 MIPS_INVAL("MASK ABSQ_S.QH");
18046 generate_exception_end(ctx
, EXCP_RI
);
18050 case OPC_ADDU_OB_DSP
:
18051 op2
= MASK_ADDU_OB(ctx
->opcode
);
18053 case OPC_RADDU_L_OB
:
18055 case OPC_SUBQ_S_PW
:
18057 case OPC_SUBQ_S_QH
:
18059 case OPC_SUBU_S_OB
:
18061 case OPC_SUBU_S_QH
:
18063 case OPC_SUBUH_R_OB
:
18065 case OPC_ADDQ_S_PW
:
18067 case OPC_ADDQ_S_QH
:
18069 case OPC_ADDU_S_OB
:
18071 case OPC_ADDU_S_QH
:
18073 case OPC_ADDUH_R_OB
:
18074 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18076 case OPC_MULEQ_S_PW_QHL
:
18077 case OPC_MULEQ_S_PW_QHR
:
18078 case OPC_MULEU_S_QH_OBL
:
18079 case OPC_MULEU_S_QH_OBR
:
18080 case OPC_MULQ_RS_QH
:
18081 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18083 default: /* Invalid */
18084 MIPS_INVAL("MASK ADDU.OB");
18085 generate_exception_end(ctx
, EXCP_RI
);
18089 case OPC_CMPU_EQ_OB_DSP
:
18090 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
18092 case OPC_PRECR_SRA_QH_PW
:
18093 case OPC_PRECR_SRA_R_QH_PW
:
18094 /* Return value is rt. */
18095 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
18097 case OPC_PRECR_OB_QH
:
18098 case OPC_PRECRQ_OB_QH
:
18099 case OPC_PRECRQ_PW_L
:
18100 case OPC_PRECRQ_QH_PW
:
18101 case OPC_PRECRQ_RS_QH_PW
:
18102 case OPC_PRECRQU_S_OB_QH
:
18103 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
18105 case OPC_CMPU_EQ_OB
:
18106 case OPC_CMPU_LT_OB
:
18107 case OPC_CMPU_LE_OB
:
18108 case OPC_CMP_EQ_QH
:
18109 case OPC_CMP_LT_QH
:
18110 case OPC_CMP_LE_QH
:
18111 case OPC_CMP_EQ_PW
:
18112 case OPC_CMP_LT_PW
:
18113 case OPC_CMP_LE_PW
:
18114 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18116 case OPC_CMPGDU_EQ_OB
:
18117 case OPC_CMPGDU_LT_OB
:
18118 case OPC_CMPGDU_LE_OB
:
18119 case OPC_CMPGU_EQ_OB
:
18120 case OPC_CMPGU_LT_OB
:
18121 case OPC_CMPGU_LE_OB
:
18122 case OPC_PACKRL_PW
:
18126 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
18128 default: /* Invalid */
18129 MIPS_INVAL("MASK CMPU_EQ.OB");
18130 generate_exception_end(ctx
, EXCP_RI
);
18134 case OPC_DAPPEND_DSP
:
18135 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
18137 case OPC_DEXTR_W_DSP
:
18138 op2
= MASK_DEXTR_W(ctx
->opcode
);
18145 case OPC_DEXTR_R_L
:
18146 case OPC_DEXTR_RS_L
:
18148 case OPC_DEXTR_R_W
:
18149 case OPC_DEXTR_RS_W
:
18150 case OPC_DEXTR_S_H
:
18152 case OPC_DEXTRV_R_L
:
18153 case OPC_DEXTRV_RS_L
:
18154 case OPC_DEXTRV_S_H
:
18156 case OPC_DEXTRV_R_W
:
18157 case OPC_DEXTRV_RS_W
:
18158 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
18163 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18165 default: /* Invalid */
18166 MIPS_INVAL("MASK EXTR.W");
18167 generate_exception_end(ctx
, EXCP_RI
);
18171 case OPC_DPAQ_W_QH_DSP
:
18172 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
18174 case OPC_DPAU_H_OBL
:
18175 case OPC_DPAU_H_OBR
:
18176 case OPC_DPSU_H_OBL
:
18177 case OPC_DPSU_H_OBR
:
18179 case OPC_DPAQ_S_W_QH
:
18181 case OPC_DPSQ_S_W_QH
:
18182 case OPC_MULSAQ_S_W_QH
:
18183 case OPC_DPAQ_SA_L_PW
:
18184 case OPC_DPSQ_SA_L_PW
:
18185 case OPC_MULSAQ_S_L_PW
:
18186 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18188 case OPC_MAQ_S_W_QHLL
:
18189 case OPC_MAQ_S_W_QHLR
:
18190 case OPC_MAQ_S_W_QHRL
:
18191 case OPC_MAQ_S_W_QHRR
:
18192 case OPC_MAQ_SA_W_QHLL
:
18193 case OPC_MAQ_SA_W_QHLR
:
18194 case OPC_MAQ_SA_W_QHRL
:
18195 case OPC_MAQ_SA_W_QHRR
:
18196 case OPC_MAQ_S_L_PWL
:
18197 case OPC_MAQ_S_L_PWR
:
18202 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
18204 default: /* Invalid */
18205 MIPS_INVAL("MASK DPAQ.W.QH");
18206 generate_exception_end(ctx
, EXCP_RI
);
18210 case OPC_DINSV_DSP
:
18211 op2
= MASK_INSV(ctx
->opcode
);
18222 t0
= tcg_temp_new();
18223 t1
= tcg_temp_new();
18225 gen_load_gpr(t0
, rt
);
18226 gen_load_gpr(t1
, rs
);
18228 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
18234 default: /* Invalid */
18235 MIPS_INVAL("MASK DINSV");
18236 generate_exception_end(ctx
, EXCP_RI
);
18240 case OPC_SHLL_OB_DSP
:
18241 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
18244 default: /* Invalid */
18245 MIPS_INVAL("special3_legacy");
18246 generate_exception_end(ctx
, EXCP_RI
);
18251 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
18253 int rs
, rt
, rd
, sa
;
18257 rs
= (ctx
->opcode
>> 21) & 0x1f;
18258 rt
= (ctx
->opcode
>> 16) & 0x1f;
18259 rd
= (ctx
->opcode
>> 11) & 0x1f;
18260 sa
= (ctx
->opcode
>> 6) & 0x1f;
18261 imm
= sextract32(ctx
->opcode
, 7, 9);
18263 op1
= MASK_SPECIAL3(ctx
->opcode
);
18266 * EVA loads and stores overlap Loongson 2E instructions decoded by
18267 * decode_opc_special3_legacy(), so be careful to allow their decoding when
18272 case OPC_LWLE
... OPC_LWRE
:
18273 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18275 case OPC_LBUE
... OPC_LHUE
:
18276 case OPC_LBE
... OPC_LWE
:
18277 check_cp0_enabled(ctx
);
18278 gen_ld(ctx
, op1
, rt
, rs
, imm
);
18280 case OPC_SWLE
... OPC_SWRE
:
18281 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18283 case OPC_SBE
... OPC_SHE
:
18285 check_cp0_enabled(ctx
);
18286 gen_st(ctx
, op1
, rt
, rs
, imm
);
18289 check_cp0_enabled(ctx
);
18290 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
18293 check_cp0_enabled(ctx
);
18294 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
18295 gen_cache_operation(ctx
, rt
, rs
, imm
);
18297 /* Treat as NOP. */
18300 check_cp0_enabled(ctx
);
18301 /* Treat as NOP. */
18309 check_insn(ctx
, ISA_MIPS32R2
);
18310 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18313 op2
= MASK_BSHFL(ctx
->opcode
);
18315 case OPC_ALIGN
... OPC_ALIGN_END
:
18317 check_insn(ctx
, ISA_MIPS32R6
);
18318 decode_opc_special3_r6(env
, ctx
);
18321 check_insn(ctx
, ISA_MIPS32R2
);
18322 gen_bshfl(ctx
, op2
, rt
, rd
);
18326 #if defined(TARGET_MIPS64)
18327 case OPC_DEXTM
... OPC_DEXT
:
18328 case OPC_DINSM
... OPC_DINS
:
18329 check_insn(ctx
, ISA_MIPS64R2
);
18330 check_mips_64(ctx
);
18331 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18334 op2
= MASK_DBSHFL(ctx
->opcode
);
18336 case OPC_DALIGN
... OPC_DALIGN_END
:
18338 check_insn(ctx
, ISA_MIPS32R6
);
18339 decode_opc_special3_r6(env
, ctx
);
18342 check_insn(ctx
, ISA_MIPS64R2
);
18343 check_mips_64(ctx
);
18344 op2
= MASK_DBSHFL(ctx
->opcode
);
18345 gen_bshfl(ctx
, op2
, rt
, rd
);
18351 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18354 check_insn(ctx
, ASE_MT
);
18356 TCGv t0
= tcg_temp_new();
18357 TCGv t1
= tcg_temp_new();
18359 gen_load_gpr(t0
, rt
);
18360 gen_load_gpr(t1
, rs
);
18361 gen_helper_fork(t0
, t1
);
18367 check_insn(ctx
, ASE_MT
);
18369 TCGv t0
= tcg_temp_new();
18371 gen_load_gpr(t0
, rs
);
18372 gen_helper_yield(t0
, cpu_env
, t0
);
18373 gen_store_gpr(t0
, rd
);
18378 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18379 decode_opc_special3_r6(env
, ctx
);
18381 decode_opc_special3_legacy(env
, ctx
);
18386 /* MIPS SIMD Architecture (MSA) */
18387 static inline int check_msa_access(DisasContext
*ctx
)
18389 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
18390 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
18391 generate_exception_end(ctx
, EXCP_RI
);
18395 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
18396 if (ctx
->insn_flags
& ASE_MSA
) {
18397 generate_exception_end(ctx
, EXCP_MSADIS
);
18400 generate_exception_end(ctx
, EXCP_RI
);
18407 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
18409 /* generates tcg ops to check if any element is 0 */
18410 /* Note this function only works with MSA_WRLEN = 128 */
18411 uint64_t eval_zero_or_big
= 0;
18412 uint64_t eval_big
= 0;
18413 TCGv_i64 t0
= tcg_temp_new_i64();
18414 TCGv_i64 t1
= tcg_temp_new_i64();
18417 eval_zero_or_big
= 0x0101010101010101ULL
;
18418 eval_big
= 0x8080808080808080ULL
;
18421 eval_zero_or_big
= 0x0001000100010001ULL
;
18422 eval_big
= 0x8000800080008000ULL
;
18425 eval_zero_or_big
= 0x0000000100000001ULL
;
18426 eval_big
= 0x8000000080000000ULL
;
18429 eval_zero_or_big
= 0x0000000000000001ULL
;
18430 eval_big
= 0x8000000000000000ULL
;
18433 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
18434 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
18435 tcg_gen_andi_i64(t0
, t0
, eval_big
);
18436 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
18437 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
18438 tcg_gen_andi_i64(t1
, t1
, eval_big
);
18439 tcg_gen_or_i64(t0
, t0
, t1
);
18440 /* if all bits are zero then all elements are not zero */
18441 /* if some bit is non-zero then some element is zero */
18442 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
18443 tcg_gen_trunc_i64_tl(tresult
, t0
);
18444 tcg_temp_free_i64(t0
);
18445 tcg_temp_free_i64(t1
);
18448 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
18450 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18451 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18452 int64_t s16
= (int16_t)ctx
->opcode
;
18454 check_msa_access(ctx
);
18456 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
18457 generate_exception_end(ctx
, EXCP_RI
);
18464 TCGv_i64 t0
= tcg_temp_new_i64();
18465 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
18466 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
18467 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
18468 tcg_gen_trunc_i64_tl(bcond
, t0
);
18469 tcg_temp_free_i64(t0
);
18476 gen_check_zero_element(bcond
, df
, wt
);
18482 gen_check_zero_element(bcond
, df
, wt
);
18483 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
18487 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
18489 ctx
->hflags
|= MIPS_HFLAG_BC
;
18490 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
18493 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18495 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18496 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18497 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18498 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18500 TCGv_i32 twd
= tcg_const_i32(wd
);
18501 TCGv_i32 tws
= tcg_const_i32(ws
);
18502 TCGv_i32 ti8
= tcg_const_i32(i8
);
18504 switch (MASK_MSA_I8(ctx
->opcode
)) {
18506 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18509 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18512 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18515 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18518 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18521 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18524 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18530 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18531 if (df
== DF_DOUBLE
) {
18532 generate_exception_end(ctx
, EXCP_RI
);
18534 TCGv_i32 tdf
= tcg_const_i32(df
);
18535 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18536 tcg_temp_free_i32(tdf
);
18541 MIPS_INVAL("MSA instruction");
18542 generate_exception_end(ctx
, EXCP_RI
);
18546 tcg_temp_free_i32(twd
);
18547 tcg_temp_free_i32(tws
);
18548 tcg_temp_free_i32(ti8
);
18551 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18553 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18554 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18555 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18556 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18557 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18558 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18560 TCGv_i32 tdf
= tcg_const_i32(df
);
18561 TCGv_i32 twd
= tcg_const_i32(wd
);
18562 TCGv_i32 tws
= tcg_const_i32(ws
);
18563 TCGv_i32 timm
= tcg_temp_new_i32();
18564 tcg_gen_movi_i32(timm
, u5
);
18566 switch (MASK_MSA_I5(ctx
->opcode
)) {
18568 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18571 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18573 case OPC_MAXI_S_df
:
18574 tcg_gen_movi_i32(timm
, s5
);
18575 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18577 case OPC_MAXI_U_df
:
18578 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18580 case OPC_MINI_S_df
:
18581 tcg_gen_movi_i32(timm
, s5
);
18582 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18584 case OPC_MINI_U_df
:
18585 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18588 tcg_gen_movi_i32(timm
, s5
);
18589 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18591 case OPC_CLTI_S_df
:
18592 tcg_gen_movi_i32(timm
, s5
);
18593 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18595 case OPC_CLTI_U_df
:
18596 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18598 case OPC_CLEI_S_df
:
18599 tcg_gen_movi_i32(timm
, s5
);
18600 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18602 case OPC_CLEI_U_df
:
18603 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18607 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18608 tcg_gen_movi_i32(timm
, s10
);
18609 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18613 MIPS_INVAL("MSA instruction");
18614 generate_exception_end(ctx
, EXCP_RI
);
18618 tcg_temp_free_i32(tdf
);
18619 tcg_temp_free_i32(twd
);
18620 tcg_temp_free_i32(tws
);
18621 tcg_temp_free_i32(timm
);
18624 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18626 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18627 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18628 uint32_t df
= 0, m
= 0;
18629 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18630 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18637 if ((dfm
& 0x40) == 0x00) {
18640 } else if ((dfm
& 0x60) == 0x40) {
18643 } else if ((dfm
& 0x70) == 0x60) {
18646 } else if ((dfm
& 0x78) == 0x70) {
18650 generate_exception_end(ctx
, EXCP_RI
);
18654 tdf
= tcg_const_i32(df
);
18655 tm
= tcg_const_i32(m
);
18656 twd
= tcg_const_i32(wd
);
18657 tws
= tcg_const_i32(ws
);
18659 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18661 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18664 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18667 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18670 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18673 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18676 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18678 case OPC_BINSLI_df
:
18679 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18681 case OPC_BINSRI_df
:
18682 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18685 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18688 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18691 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18694 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18697 MIPS_INVAL("MSA instruction");
18698 generate_exception_end(ctx
, EXCP_RI
);
18702 tcg_temp_free_i32(tdf
);
18703 tcg_temp_free_i32(tm
);
18704 tcg_temp_free_i32(twd
);
18705 tcg_temp_free_i32(tws
);
18708 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18710 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18711 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18712 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18713 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18714 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18716 TCGv_i32 tdf
= tcg_const_i32(df
);
18717 TCGv_i32 twd
= tcg_const_i32(wd
);
18718 TCGv_i32 tws
= tcg_const_i32(ws
);
18719 TCGv_i32 twt
= tcg_const_i32(wt
);
18721 switch (MASK_MSA_3R(ctx
->opcode
)) {
18723 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18726 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18729 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18732 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18734 case OPC_SUBS_S_df
:
18735 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18738 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18741 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18744 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18747 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18750 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18752 case OPC_ADDS_A_df
:
18753 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18755 case OPC_SUBS_U_df
:
18756 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18759 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18762 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18765 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18768 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18771 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18774 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18776 case OPC_ADDS_S_df
:
18777 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18779 case OPC_SUBSUS_U_df
:
18780 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18783 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18786 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18789 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18792 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18795 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18798 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18800 case OPC_ADDS_U_df
:
18801 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18803 case OPC_SUBSUU_S_df
:
18804 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18807 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18810 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18813 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18816 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18819 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18821 case OPC_ASUB_S_df
:
18822 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18825 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18828 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18831 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18834 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18837 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18840 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18842 case OPC_ASUB_U_df
:
18843 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18846 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18849 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18852 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18855 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18857 case OPC_AVER_S_df
:
18858 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18861 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18864 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18867 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18870 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18872 case OPC_AVER_U_df
:
18873 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18876 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18879 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18882 case OPC_DOTP_S_df
:
18883 case OPC_DOTP_U_df
:
18884 case OPC_DPADD_S_df
:
18885 case OPC_DPADD_U_df
:
18886 case OPC_DPSUB_S_df
:
18887 case OPC_HADD_S_df
:
18888 case OPC_DPSUB_U_df
:
18889 case OPC_HADD_U_df
:
18890 case OPC_HSUB_S_df
:
18891 case OPC_HSUB_U_df
:
18892 if (df
== DF_BYTE
) {
18893 generate_exception_end(ctx
, EXCP_RI
);
18896 switch (MASK_MSA_3R(ctx
->opcode
)) {
18897 case OPC_DOTP_S_df
:
18898 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18900 case OPC_DOTP_U_df
:
18901 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18903 case OPC_DPADD_S_df
:
18904 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18906 case OPC_DPADD_U_df
:
18907 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18909 case OPC_DPSUB_S_df
:
18910 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18912 case OPC_HADD_S_df
:
18913 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18915 case OPC_DPSUB_U_df
:
18916 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18918 case OPC_HADD_U_df
:
18919 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18921 case OPC_HSUB_S_df
:
18922 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18924 case OPC_HSUB_U_df
:
18925 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18930 MIPS_INVAL("MSA instruction");
18931 generate_exception_end(ctx
, EXCP_RI
);
18934 tcg_temp_free_i32(twd
);
18935 tcg_temp_free_i32(tws
);
18936 tcg_temp_free_i32(twt
);
18937 tcg_temp_free_i32(tdf
);
18940 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18942 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18943 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18944 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18945 TCGv telm
= tcg_temp_new();
18946 TCGv_i32 tsr
= tcg_const_i32(source
);
18947 TCGv_i32 tdt
= tcg_const_i32(dest
);
18949 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18951 gen_load_gpr(telm
, source
);
18952 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18955 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18956 gen_store_gpr(telm
, dest
);
18959 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18962 MIPS_INVAL("MSA instruction");
18963 generate_exception_end(ctx
, EXCP_RI
);
18967 tcg_temp_free(telm
);
18968 tcg_temp_free_i32(tdt
);
18969 tcg_temp_free_i32(tsr
);
18972 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18975 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18976 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18977 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18979 TCGv_i32 tws
= tcg_const_i32(ws
);
18980 TCGv_i32 twd
= tcg_const_i32(wd
);
18981 TCGv_i32 tn
= tcg_const_i32(n
);
18982 TCGv_i32 tdf
= tcg_const_i32(df
);
18984 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18986 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18988 case OPC_SPLATI_df
:
18989 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18992 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18994 case OPC_COPY_S_df
:
18995 case OPC_COPY_U_df
:
18996 case OPC_INSERT_df
:
18997 #if !defined(TARGET_MIPS64)
18998 /* Double format valid only for MIPS64 */
18999 if (df
== DF_DOUBLE
) {
19000 generate_exception_end(ctx
, EXCP_RI
);
19004 switch (MASK_MSA_ELM(ctx
->opcode
)) {
19005 case OPC_COPY_S_df
:
19006 if (likely(wd
!= 0)) {
19007 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
19010 case OPC_COPY_U_df
:
19011 if (likely(wd
!= 0)) {
19012 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
19015 case OPC_INSERT_df
:
19016 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
19021 MIPS_INVAL("MSA instruction");
19022 generate_exception_end(ctx
, EXCP_RI
);
19024 tcg_temp_free_i32(twd
);
19025 tcg_temp_free_i32(tws
);
19026 tcg_temp_free_i32(tn
);
19027 tcg_temp_free_i32(tdf
);
19030 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
19032 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
19033 uint32_t df
= 0, n
= 0;
19035 if ((dfn
& 0x30) == 0x00) {
19038 } else if ((dfn
& 0x38) == 0x20) {
19041 } else if ((dfn
& 0x3c) == 0x30) {
19044 } else if ((dfn
& 0x3e) == 0x38) {
19047 } else if (dfn
== 0x3E) {
19048 /* CTCMSA, CFCMSA, MOVE.V */
19049 gen_msa_elm_3e(env
, ctx
);
19052 generate_exception_end(ctx
, EXCP_RI
);
19056 gen_msa_elm_df(env
, ctx
, df
, n
);
19059 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
19061 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
19062 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
19063 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19064 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19065 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19067 TCGv_i32 twd
= tcg_const_i32(wd
);
19068 TCGv_i32 tws
= tcg_const_i32(ws
);
19069 TCGv_i32 twt
= tcg_const_i32(wt
);
19070 TCGv_i32 tdf
= tcg_temp_new_i32();
19072 /* adjust df value for floating-point instruction */
19073 tcg_gen_movi_i32(tdf
, df
+ 2);
19075 switch (MASK_MSA_3RF(ctx
->opcode
)) {
19077 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
19080 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
19083 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
19086 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
19089 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
19092 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19095 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
19098 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
19101 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19104 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
19107 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
19110 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
19113 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
19116 tcg_gen_movi_i32(tdf
, df
+ 1);
19117 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19120 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
19123 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
19125 case OPC_MADD_Q_df
:
19126 tcg_gen_movi_i32(tdf
, df
+ 1);
19127 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19130 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
19132 case OPC_MSUB_Q_df
:
19133 tcg_gen_movi_i32(tdf
, df
+ 1);
19134 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19137 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
19140 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
19143 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
19146 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
19149 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
19152 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
19155 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19158 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19161 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
19164 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
19167 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
19170 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
19173 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
19175 case OPC_MULR_Q_df
:
19176 tcg_gen_movi_i32(tdf
, df
+ 1);
19177 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19180 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
19182 case OPC_FMIN_A_df
:
19183 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19185 case OPC_MADDR_Q_df
:
19186 tcg_gen_movi_i32(tdf
, df
+ 1);
19187 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19190 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
19193 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
19195 case OPC_MSUBR_Q_df
:
19196 tcg_gen_movi_i32(tdf
, df
+ 1);
19197 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
19200 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
19202 case OPC_FMAX_A_df
:
19203 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
19206 MIPS_INVAL("MSA instruction");
19207 generate_exception_end(ctx
, EXCP_RI
);
19211 tcg_temp_free_i32(twd
);
19212 tcg_temp_free_i32(tws
);
19213 tcg_temp_free_i32(twt
);
19214 tcg_temp_free_i32(tdf
);
19217 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
19219 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
19220 (op & (0x7 << 18)))
19221 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19222 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19223 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19224 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
19225 TCGv_i32 twd
= tcg_const_i32(wd
);
19226 TCGv_i32 tws
= tcg_const_i32(ws
);
19227 TCGv_i32 twt
= tcg_const_i32(wt
);
19228 TCGv_i32 tdf
= tcg_const_i32(df
);
19230 switch (MASK_MSA_2R(ctx
->opcode
)) {
19232 #if !defined(TARGET_MIPS64)
19233 /* Double format valid only for MIPS64 */
19234 if (df
== DF_DOUBLE
) {
19235 generate_exception_end(ctx
, EXCP_RI
);
19239 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
19242 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
19245 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
19248 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
19251 MIPS_INVAL("MSA instruction");
19252 generate_exception_end(ctx
, EXCP_RI
);
19256 tcg_temp_free_i32(twd
);
19257 tcg_temp_free_i32(tws
);
19258 tcg_temp_free_i32(twt
);
19259 tcg_temp_free_i32(tdf
);
19262 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
19264 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
19265 (op & (0xf << 17)))
19266 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19267 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19268 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19269 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
19270 TCGv_i32 twd
= tcg_const_i32(wd
);
19271 TCGv_i32 tws
= tcg_const_i32(ws
);
19272 TCGv_i32 twt
= tcg_const_i32(wt
);
19273 /* adjust df value for floating-point instruction */
19274 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
19276 switch (MASK_MSA_2RF(ctx
->opcode
)) {
19277 case OPC_FCLASS_df
:
19278 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
19280 case OPC_FTRUNC_S_df
:
19281 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
19283 case OPC_FTRUNC_U_df
:
19284 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
19287 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
19289 case OPC_FRSQRT_df
:
19290 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
19293 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
19296 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
19299 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
19301 case OPC_FEXUPL_df
:
19302 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
19304 case OPC_FEXUPR_df
:
19305 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
19308 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
19311 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
19313 case OPC_FTINT_S_df
:
19314 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
19316 case OPC_FTINT_U_df
:
19317 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
19319 case OPC_FFINT_S_df
:
19320 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
19322 case OPC_FFINT_U_df
:
19323 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19327 tcg_temp_free_i32(twd
);
19328 tcg_temp_free_i32(tws
);
19329 tcg_temp_free_i32(twt
);
19330 tcg_temp_free_i32(tdf
);
19333 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19335 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19336 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19337 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19338 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19339 TCGv_i32 twd
= tcg_const_i32(wd
);
19340 TCGv_i32 tws
= tcg_const_i32(ws
);
19341 TCGv_i32 twt
= tcg_const_i32(wt
);
19343 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19345 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19348 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19351 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19354 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19357 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19360 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19363 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19366 MIPS_INVAL("MSA instruction");
19367 generate_exception_end(ctx
, EXCP_RI
);
19371 tcg_temp_free_i32(twd
);
19372 tcg_temp_free_i32(tws
);
19373 tcg_temp_free_i32(twt
);
19376 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19378 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19386 gen_msa_vec_v(env
, ctx
);
19389 gen_msa_2r(env
, ctx
);
19392 gen_msa_2rf(env
, ctx
);
19395 MIPS_INVAL("MSA instruction");
19396 generate_exception_end(ctx
, EXCP_RI
);
19401 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
19403 uint32_t opcode
= ctx
->opcode
;
19404 check_insn(ctx
, ASE_MSA
);
19405 check_msa_access(ctx
);
19407 switch (MASK_MSA_MINOR(opcode
)) {
19408 case OPC_MSA_I8_00
:
19409 case OPC_MSA_I8_01
:
19410 case OPC_MSA_I8_02
:
19411 gen_msa_i8(env
, ctx
);
19413 case OPC_MSA_I5_06
:
19414 case OPC_MSA_I5_07
:
19415 gen_msa_i5(env
, ctx
);
19417 case OPC_MSA_BIT_09
:
19418 case OPC_MSA_BIT_0A
:
19419 gen_msa_bit(env
, ctx
);
19421 case OPC_MSA_3R_0D
:
19422 case OPC_MSA_3R_0E
:
19423 case OPC_MSA_3R_0F
:
19424 case OPC_MSA_3R_10
:
19425 case OPC_MSA_3R_11
:
19426 case OPC_MSA_3R_12
:
19427 case OPC_MSA_3R_13
:
19428 case OPC_MSA_3R_14
:
19429 case OPC_MSA_3R_15
:
19430 gen_msa_3r(env
, ctx
);
19433 gen_msa_elm(env
, ctx
);
19435 case OPC_MSA_3RF_1A
:
19436 case OPC_MSA_3RF_1B
:
19437 case OPC_MSA_3RF_1C
:
19438 gen_msa_3rf(env
, ctx
);
19441 gen_msa_vec(env
, ctx
);
19452 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19453 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19454 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19455 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19457 TCGv_i32 twd
= tcg_const_i32(wd
);
19458 TCGv taddr
= tcg_temp_new();
19459 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19461 switch (MASK_MSA_MINOR(opcode
)) {
19463 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19466 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19469 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19472 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19475 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19478 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19481 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19484 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19488 tcg_temp_free_i32(twd
);
19489 tcg_temp_free(taddr
);
19493 MIPS_INVAL("MSA instruction");
19494 generate_exception_end(ctx
, EXCP_RI
);
19500 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19503 int rs
, rt
, rd
, sa
;
19507 /* make sure instructions are on a word boundary */
19508 if (ctx
->pc
& 0x3) {
19509 env
->CP0_BadVAddr
= ctx
->pc
;
19510 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19514 /* Handle blikely not taken case */
19515 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19516 TCGLabel
*l1
= gen_new_label();
19518 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19519 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19520 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
19524 op
= MASK_OP_MAJOR(ctx
->opcode
);
19525 rs
= (ctx
->opcode
>> 21) & 0x1f;
19526 rt
= (ctx
->opcode
>> 16) & 0x1f;
19527 rd
= (ctx
->opcode
>> 11) & 0x1f;
19528 sa
= (ctx
->opcode
>> 6) & 0x1f;
19529 imm
= (int16_t)ctx
->opcode
;
19532 decode_opc_special(env
, ctx
);
19535 decode_opc_special2_legacy(env
, ctx
);
19538 decode_opc_special3(env
, ctx
);
19541 op1
= MASK_REGIMM(ctx
->opcode
);
19543 case OPC_BLTZL
: /* REGIMM branches */
19547 check_insn(ctx
, ISA_MIPS2
);
19548 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19552 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19556 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19558 /* OPC_NAL, OPC_BAL */
19559 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
19561 generate_exception_end(ctx
, EXCP_RI
);
19564 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19567 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
19569 check_insn(ctx
, ISA_MIPS2
);
19570 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19571 gen_trap(ctx
, op1
, rs
, -1, imm
);
19574 check_insn(ctx
, ISA_MIPS32R6
);
19575 generate_exception_end(ctx
, EXCP_RI
);
19578 check_insn(ctx
, ISA_MIPS32R2
);
19579 /* Break the TB to be able to sync copied instructions
19581 ctx
->bstate
= BS_STOP
;
19583 case OPC_BPOSGE32
: /* MIPS DSP branch */
19584 #if defined(TARGET_MIPS64)
19588 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19590 #if defined(TARGET_MIPS64)
19592 check_insn(ctx
, ISA_MIPS32R6
);
19593 check_mips_64(ctx
);
19595 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19599 check_insn(ctx
, ISA_MIPS32R6
);
19600 check_mips_64(ctx
);
19602 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19606 default: /* Invalid */
19607 MIPS_INVAL("regimm");
19608 generate_exception_end(ctx
, EXCP_RI
);
19613 check_cp0_enabled(ctx
);
19614 op1
= MASK_CP0(ctx
->opcode
);
19622 #if defined(TARGET_MIPS64)
19626 #ifndef CONFIG_USER_ONLY
19627 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19628 #endif /* !CONFIG_USER_ONLY */
19630 case OPC_C0_FIRST
... OPC_C0_LAST
:
19631 #ifndef CONFIG_USER_ONLY
19632 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19633 #endif /* !CONFIG_USER_ONLY */
19636 #ifndef CONFIG_USER_ONLY
19639 TCGv t0
= tcg_temp_new();
19641 op2
= MASK_MFMC0(ctx
->opcode
);
19644 check_insn(ctx
, ASE_MT
);
19645 gen_helper_dmt(t0
);
19646 gen_store_gpr(t0
, rt
);
19649 check_insn(ctx
, ASE_MT
);
19650 gen_helper_emt(t0
);
19651 gen_store_gpr(t0
, rt
);
19654 check_insn(ctx
, ASE_MT
);
19655 gen_helper_dvpe(t0
, cpu_env
);
19656 gen_store_gpr(t0
, rt
);
19659 check_insn(ctx
, ASE_MT
);
19660 gen_helper_evpe(t0
, cpu_env
);
19661 gen_store_gpr(t0
, rt
);
19664 check_insn(ctx
, ISA_MIPS32R6
);
19666 gen_helper_dvp(t0
, cpu_env
);
19667 gen_store_gpr(t0
, rt
);
19671 check_insn(ctx
, ISA_MIPS32R6
);
19673 gen_helper_evp(t0
, cpu_env
);
19674 gen_store_gpr(t0
, rt
);
19678 check_insn(ctx
, ISA_MIPS32R2
);
19679 save_cpu_state(ctx
, 1);
19680 gen_helper_di(t0
, cpu_env
);
19681 gen_store_gpr(t0
, rt
);
19682 /* Stop translation as we may have switched
19683 the execution mode. */
19684 ctx
->bstate
= BS_STOP
;
19687 check_insn(ctx
, ISA_MIPS32R2
);
19688 save_cpu_state(ctx
, 1);
19689 gen_helper_ei(t0
, cpu_env
);
19690 gen_store_gpr(t0
, rt
);
19691 /* Stop translation as we may have switched
19692 the execution mode. */
19693 ctx
->bstate
= BS_STOP
;
19695 default: /* Invalid */
19696 MIPS_INVAL("mfmc0");
19697 generate_exception_end(ctx
, EXCP_RI
);
19702 #endif /* !CONFIG_USER_ONLY */
19705 check_insn(ctx
, ISA_MIPS32R2
);
19706 gen_load_srsgpr(rt
, rd
);
19709 check_insn(ctx
, ISA_MIPS32R2
);
19710 gen_store_srsgpr(rt
, rd
);
19714 generate_exception_end(ctx
, EXCP_RI
);
19718 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19719 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19720 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19721 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19724 /* Arithmetic with immediate opcode */
19725 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19729 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19731 case OPC_SLTI
: /* Set on less than with immediate opcode */
19733 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19735 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19736 case OPC_LUI
: /* OPC_AUI */
19739 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19741 case OPC_J
... OPC_JAL
: /* Jump */
19742 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19743 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19746 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19747 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19749 generate_exception_end(ctx
, EXCP_RI
);
19752 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19753 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19756 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19759 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19760 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19762 generate_exception_end(ctx
, EXCP_RI
);
19765 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19766 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19769 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19772 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19775 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19777 check_insn(ctx
, ISA_MIPS32R6
);
19778 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19779 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19782 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19785 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19787 check_insn(ctx
, ISA_MIPS32R6
);
19788 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19789 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19794 check_insn(ctx
, ISA_MIPS2
);
19795 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19799 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19801 case OPC_LL
: /* Load and stores */
19802 check_insn(ctx
, ISA_MIPS2
);
19806 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19808 case OPC_LB
... OPC_LH
:
19809 case OPC_LW
... OPC_LHU
:
19810 gen_ld(ctx
, op
, rt
, rs
, imm
);
19814 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19816 case OPC_SB
... OPC_SH
:
19818 gen_st(ctx
, op
, rt
, rs
, imm
);
19821 check_insn(ctx
, ISA_MIPS2
);
19822 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19823 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19826 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19827 check_cp0_enabled(ctx
);
19828 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19829 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
19830 gen_cache_operation(ctx
, rt
, rs
, imm
);
19832 /* Treat as NOP. */
19835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19836 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19837 /* Treat as NOP. */
19840 /* Floating point (COP1). */
19845 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19849 op1
= MASK_CP1(ctx
->opcode
);
19854 check_cp1_enabled(ctx
);
19855 check_insn(ctx
, ISA_MIPS32R2
);
19860 check_cp1_enabled(ctx
);
19861 gen_cp1(ctx
, op1
, rt
, rd
);
19863 #if defined(TARGET_MIPS64)
19866 check_cp1_enabled(ctx
);
19867 check_insn(ctx
, ISA_MIPS3
);
19868 check_mips_64(ctx
);
19869 gen_cp1(ctx
, op1
, rt
, rd
);
19872 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19873 check_cp1_enabled(ctx
);
19874 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19876 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19881 check_insn(ctx
, ASE_MIPS3D
);
19882 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19883 (rt
>> 2) & 0x7, imm
<< 2);
19887 check_cp1_enabled(ctx
);
19888 check_insn(ctx
, ISA_MIPS32R6
);
19889 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19893 check_cp1_enabled(ctx
);
19894 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19896 check_insn(ctx
, ASE_MIPS3D
);
19899 check_cp1_enabled(ctx
);
19900 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19901 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19902 (rt
>> 2) & 0x7, imm
<< 2);
19909 check_cp1_enabled(ctx
);
19910 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19916 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19917 check_cp1_enabled(ctx
);
19918 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19920 case R6_OPC_CMP_AF_S
:
19921 case R6_OPC_CMP_UN_S
:
19922 case R6_OPC_CMP_EQ_S
:
19923 case R6_OPC_CMP_UEQ_S
:
19924 case R6_OPC_CMP_LT_S
:
19925 case R6_OPC_CMP_ULT_S
:
19926 case R6_OPC_CMP_LE_S
:
19927 case R6_OPC_CMP_ULE_S
:
19928 case R6_OPC_CMP_SAF_S
:
19929 case R6_OPC_CMP_SUN_S
:
19930 case R6_OPC_CMP_SEQ_S
:
19931 case R6_OPC_CMP_SEUQ_S
:
19932 case R6_OPC_CMP_SLT_S
:
19933 case R6_OPC_CMP_SULT_S
:
19934 case R6_OPC_CMP_SLE_S
:
19935 case R6_OPC_CMP_SULE_S
:
19936 case R6_OPC_CMP_OR_S
:
19937 case R6_OPC_CMP_UNE_S
:
19938 case R6_OPC_CMP_NE_S
:
19939 case R6_OPC_CMP_SOR_S
:
19940 case R6_OPC_CMP_SUNE_S
:
19941 case R6_OPC_CMP_SNE_S
:
19942 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19944 case R6_OPC_CMP_AF_D
:
19945 case R6_OPC_CMP_UN_D
:
19946 case R6_OPC_CMP_EQ_D
:
19947 case R6_OPC_CMP_UEQ_D
:
19948 case R6_OPC_CMP_LT_D
:
19949 case R6_OPC_CMP_ULT_D
:
19950 case R6_OPC_CMP_LE_D
:
19951 case R6_OPC_CMP_ULE_D
:
19952 case R6_OPC_CMP_SAF_D
:
19953 case R6_OPC_CMP_SUN_D
:
19954 case R6_OPC_CMP_SEQ_D
:
19955 case R6_OPC_CMP_SEUQ_D
:
19956 case R6_OPC_CMP_SLT_D
:
19957 case R6_OPC_CMP_SULT_D
:
19958 case R6_OPC_CMP_SLE_D
:
19959 case R6_OPC_CMP_SULE_D
:
19960 case R6_OPC_CMP_OR_D
:
19961 case R6_OPC_CMP_UNE_D
:
19962 case R6_OPC_CMP_NE_D
:
19963 case R6_OPC_CMP_SOR_D
:
19964 case R6_OPC_CMP_SUNE_D
:
19965 case R6_OPC_CMP_SNE_D
:
19966 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19969 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19970 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19975 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19990 check_insn(ctx
, ASE_MSA
);
19991 gen_msa_branch(env
, ctx
, op1
);
19995 generate_exception_end(ctx
, EXCP_RI
);
20000 /* Compact branches [R6] and COP2 [non-R6] */
20001 case OPC_BC
: /* OPC_LWC2 */
20002 case OPC_BALC
: /* OPC_SWC2 */
20003 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20004 /* OPC_BC, OPC_BALC */
20005 gen_compute_compact_branch(ctx
, op
, 0, 0,
20006 sextract32(ctx
->opcode
<< 2, 0, 28));
20008 /* OPC_LWC2, OPC_SWC2 */
20009 /* COP2: Not implemented. */
20010 generate_exception_err(ctx
, EXCP_CpU
, 2);
20013 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
20014 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
20015 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20017 /* OPC_BEQZC, OPC_BNEZC */
20018 gen_compute_compact_branch(ctx
, op
, rs
, 0,
20019 sextract32(ctx
->opcode
<< 2, 0, 23));
20021 /* OPC_JIC, OPC_JIALC */
20022 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
20025 /* OPC_LWC2, OPC_SWC2 */
20026 /* COP2: Not implemented. */
20027 generate_exception_err(ctx
, EXCP_CpU
, 2);
20031 check_insn(ctx
, INSN_LOONGSON2F
);
20032 /* Note that these instructions use different fields. */
20033 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
20037 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20038 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20039 check_cp1_enabled(ctx
);
20040 op1
= MASK_CP3(ctx
->opcode
);
20044 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
20050 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20051 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
20054 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20055 /* Treat as NOP. */
20058 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
20072 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
20073 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
20077 generate_exception_end(ctx
, EXCP_RI
);
20081 generate_exception_err(ctx
, EXCP_CpU
, 1);
20085 #if defined(TARGET_MIPS64)
20086 /* MIPS64 opcodes */
20087 case OPC_LDL
... OPC_LDR
:
20089 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20093 check_insn(ctx
, ISA_MIPS3
);
20094 check_mips_64(ctx
);
20095 gen_ld(ctx
, op
, rt
, rs
, imm
);
20097 case OPC_SDL
... OPC_SDR
:
20098 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20101 check_insn(ctx
, ISA_MIPS3
);
20102 check_mips_64(ctx
);
20103 gen_st(ctx
, op
, rt
, rs
, imm
);
20106 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
20107 check_insn(ctx
, ISA_MIPS3
);
20108 check_mips_64(ctx
);
20109 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
20111 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
20112 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20113 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
20114 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20117 check_insn(ctx
, ISA_MIPS3
);
20118 check_mips_64(ctx
);
20119 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
20123 check_insn(ctx
, ISA_MIPS3
);
20124 check_mips_64(ctx
);
20125 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
20128 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
20129 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20130 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
20132 MIPS_INVAL("major opcode");
20133 generate_exception_end(ctx
, EXCP_RI
);
20137 case OPC_DAUI
: /* OPC_JALX */
20138 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
20139 #if defined(TARGET_MIPS64)
20141 check_mips_64(ctx
);
20143 generate_exception(ctx
, EXCP_RI
);
20144 } else if (rt
!= 0) {
20145 TCGv t0
= tcg_temp_new();
20146 gen_load_gpr(t0
, rs
);
20147 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
20151 generate_exception_end(ctx
, EXCP_RI
);
20152 MIPS_INVAL("major opcode");
20156 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
20157 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
20158 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
20161 case OPC_MSA
: /* OPC_MDMX */
20162 /* MDMX: Not implemented. */
20166 check_insn(ctx
, ISA_MIPS32R6
);
20167 gen_pcrel(ctx
, ctx
->opcode
, ctx
->pc
, rs
);
20169 default: /* Invalid */
20170 MIPS_INVAL("major opcode");
20171 generate_exception_end(ctx
, EXCP_RI
);
20176 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
20178 CPUMIPSState
*env
= cs
->env_ptr
;
20180 target_ulong pc_start
;
20181 target_ulong next_page_start
;
20188 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
20191 ctx
.singlestep_enabled
= cs
->singlestep_enabled
;
20192 ctx
.insn_flags
= env
->insn_flags
;
20193 ctx
.CP0_Config1
= env
->CP0_Config1
;
20195 ctx
.bstate
= BS_NONE
;
20197 ctx
.kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
20198 ctx
.rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
20199 ctx
.ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
20200 ctx
.bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
20201 ctx
.bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
20202 ctx
.PAMask
= env
->PAMask
;
20203 ctx
.mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
20204 ctx
.eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
20205 ctx
.sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
20206 ctx
.CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
20207 ctx
.cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
20208 /* Restore delay slot state from the tb context. */
20209 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
20210 ctx
.ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
20211 ctx
.ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
20212 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
20213 ctx
.vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
20214 ctx
.mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
20215 ctx
.nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
20216 ctx
.abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
20217 restore_cpu_state(env
, &ctx
);
20218 #ifdef CONFIG_USER_ONLY
20219 ctx
.mem_idx
= MIPS_HFLAG_UM
;
20221 ctx
.mem_idx
= hflags_mmu_index(ctx
.hflags
);
20223 ctx
.default_tcg_memop_mask
= (ctx
.insn_flags
& ISA_MIPS32R6
) ?
20224 MO_UNALN
: MO_ALIGN
;
20226 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
20227 if (max_insns
== 0) {
20228 max_insns
= CF_COUNT_MASK
;
20230 if (max_insns
> TCG_MAX_INSNS
) {
20231 max_insns
= TCG_MAX_INSNS
;
20234 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
20236 while (ctx
.bstate
== BS_NONE
) {
20237 tcg_gen_insn_start(ctx
.pc
, ctx
.hflags
& MIPS_HFLAG_BMASK
, ctx
.btarget
);
20240 if (unlikely(cpu_breakpoint_test(cs
, ctx
.pc
, BP_ANY
))) {
20241 save_cpu_state(&ctx
, 1);
20242 ctx
.bstate
= BS_BRANCH
;
20243 gen_helper_raise_exception_debug(cpu_env
);
20244 /* The address covered by the breakpoint must be included in
20245 [tb->pc, tb->pc + tb->size) in order to for it to be
20246 properly cleared -- thus we increment the PC here so that
20247 the logic setting tb->size below does the right thing. */
20249 goto done_generating
;
20252 if (num_insns
== max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
20256 is_slot
= ctx
.hflags
& MIPS_HFLAG_BMASK
;
20257 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
20258 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
20260 decode_opc(env
, &ctx
);
20261 } else if (ctx
.insn_flags
& ASE_MICROMIPS
) {
20262 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
20263 insn_bytes
= decode_micromips_opc(env
, &ctx
);
20264 } else if (ctx
.insn_flags
& ASE_MIPS16
) {
20265 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
20266 insn_bytes
= decode_mips16_opc(env
, &ctx
);
20268 generate_exception_end(&ctx
, EXCP_RI
);
20272 if (ctx
.hflags
& MIPS_HFLAG_BMASK
) {
20273 if (!(ctx
.hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
20274 MIPS_HFLAG_FBNSLOT
))) {
20275 /* force to generate branch as there is neither delay nor
20279 if ((ctx
.hflags
& MIPS_HFLAG_M16
) &&
20280 (ctx
.hflags
& MIPS_HFLAG_FBNSLOT
)) {
20281 /* Force to generate branch as microMIPS R6 doesn't restrict
20282 branches in the forbidden slot. */
20287 gen_branch(&ctx
, insn_bytes
);
20289 ctx
.pc
+= insn_bytes
;
20291 /* Execute a branch and its delay slot as a single instruction.
20292 This is what GDB expects and is consistent with what the
20293 hardware does (e.g. if a delay slot instruction faults, the
20294 reported PC is the PC of the branch). */
20295 if (cs
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0) {
20299 if (ctx
.pc
>= next_page_start
) {
20303 if (tcg_op_buf_full()) {
20307 if (num_insns
>= max_insns
)
20313 if (tb
->cflags
& CF_LAST_IO
) {
20316 if (cs
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
20317 save_cpu_state(&ctx
, ctx
.bstate
!= BS_EXCP
);
20318 gen_helper_raise_exception_debug(cpu_env
);
20320 switch (ctx
.bstate
) {
20322 gen_goto_tb(&ctx
, 0, ctx
.pc
);
20325 save_cpu_state(&ctx
, 0);
20326 gen_goto_tb(&ctx
, 0, ctx
.pc
);
20329 tcg_gen_exit_tb(0);
20337 gen_tb_end(tb
, num_insns
);
20339 tb
->size
= ctx
.pc
- pc_start
;
20340 tb
->icount
= num_insns
;
20344 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)
20345 && qemu_log_in_addr_range(pc_start
)) {
20347 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
20348 log_target_disas(cs
, pc_start
, ctx
.pc
- pc_start
, 0);
20355 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
20359 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
20361 #define printfpr(fp) \
20364 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
20365 " fd:%13g fs:%13g psu: %13g\n", \
20366 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
20367 (double)(fp)->fd, \
20368 (double)(fp)->fs[FP_ENDIAN_IDX], \
20369 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
20372 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
20373 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
20374 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
20375 " fd:%13g fs:%13g psu:%13g\n", \
20376 tmp.w[FP_ENDIAN_IDX], tmp.d, \
20378 (double)tmp.fs[FP_ENDIAN_IDX], \
20379 (double)tmp.fs[!FP_ENDIAN_IDX]); \
20384 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
20385 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
20386 get_float_exception_flags(&env
->active_fpu
.fp_status
));
20387 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
20388 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
20389 printfpr(&env
->active_fpu
.fpr
[i
]);
20395 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
20398 MIPSCPU
*cpu
= MIPS_CPU(cs
);
20399 CPUMIPSState
*env
= &cpu
->env
;
20402 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
20403 " LO=0x" TARGET_FMT_lx
" ds %04x "
20404 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
20405 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
20406 env
->hflags
, env
->btarget
, env
->bcond
);
20407 for (i
= 0; i
< 32; i
++) {
20409 cpu_fprintf(f
, "GPR%02d:", i
);
20410 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
20412 cpu_fprintf(f
, "\n");
20415 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
20416 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
20417 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
20419 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
20420 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
20421 env
->CP0_Config2
, env
->CP0_Config3
);
20422 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
20423 env
->CP0_Config4
, env
->CP0_Config5
);
20424 if (env
->hflags
& MIPS_HFLAG_FPU
)
20425 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
20428 void mips_tcg_init(void)
20433 /* Initialize various static tables. */
20437 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
20438 tcg_ctx
.tcg_env
= cpu_env
;
20440 TCGV_UNUSED(cpu_gpr
[0]);
20441 for (i
= 1; i
< 32; i
++)
20442 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
20443 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
20446 for (i
= 0; i
< 32; i
++) {
20447 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
20449 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
20450 /* The scalar floating-point unit (FPU) registers are mapped on
20451 * the MSA vector registers. */
20452 fpu_f64
[i
] = msa_wr_d
[i
* 2];
20453 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
20454 msa_wr_d
[i
* 2 + 1] =
20455 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
20458 cpu_PC
= tcg_global_mem_new(cpu_env
,
20459 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
20460 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
20461 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
20462 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
20464 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
20465 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
20468 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
20469 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
20471 bcond
= tcg_global_mem_new(cpu_env
,
20472 offsetof(CPUMIPSState
, bcond
), "bcond");
20473 btarget
= tcg_global_mem_new(cpu_env
,
20474 offsetof(CPUMIPSState
, btarget
), "btarget");
20475 hflags
= tcg_global_mem_new_i32(cpu_env
,
20476 offsetof(CPUMIPSState
, hflags
), "hflags");
20478 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
20479 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
20481 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
20482 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
20488 #include "translate_init.c"
20490 MIPSCPU
*cpu_mips_init(const char *cpu_model
)
20494 const mips_def_t
*def
;
20496 def
= cpu_mips_find_by_name(cpu_model
);
20499 cpu
= MIPS_CPU(object_new(TYPE_MIPS_CPU
));
20501 env
->cpu_model
= def
;
20502 env
->exception_base
= (int32_t)0xBFC00000;
20504 #ifndef CONFIG_USER_ONLY
20505 mmu_init(env
, def
);
20507 fpu_init(env
, def
);
20508 mvp_init(env
, def
);
20510 object_property_set_bool(OBJECT(cpu
), true, "realized", NULL
);
20515 bool cpu_supports_cps_smp(const char *cpu_model
)
20517 const mips_def_t
*def
= cpu_mips_find_by_name(cpu_model
);
20522 return (def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
20525 bool cpu_supports_isa(const char *cpu_model
, unsigned int isa
)
20527 const mips_def_t
*def
= cpu_mips_find_by_name(cpu_model
);
20532 return (def
->insn_flags
& isa
) != 0;
20535 void cpu_set_exception_base(int vp_index
, target_ulong address
)
20537 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
20538 vp
->env
.exception_base
= address
;
20541 void cpu_state_reset(CPUMIPSState
*env
)
20543 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
20544 CPUState
*cs
= CPU(cpu
);
20546 /* Reset registers to their default values */
20547 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
20548 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
20549 #ifdef TARGET_WORDS_BIGENDIAN
20550 env
->CP0_Config0
|= (1 << CP0C0_BE
);
20552 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
20553 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
20554 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
20555 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
20556 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
20557 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
20558 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
20559 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
20560 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
20561 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
20562 << env
->cpu_model
->CP0_LLAddr_shift
;
20563 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
20564 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
20565 env
->CCRes
= env
->cpu_model
->CCRes
;
20566 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
20567 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
20568 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
20569 env
->current_tc
= 0;
20570 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
20571 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
20572 #if defined(TARGET_MIPS64)
20573 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
20574 env
->SEGMask
|= 3ULL << 62;
20577 env
->PABITS
= env
->cpu_model
->PABITS
;
20578 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
20579 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
20580 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
20581 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
20582 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
20583 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
20584 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
20585 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
20586 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
20587 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
20588 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
20589 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
20590 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
20591 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
20592 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
20593 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
20594 env
->msair
= env
->cpu_model
->MSAIR
;
20595 env
->insn_flags
= env
->cpu_model
->insn_flags
;
20597 #if defined(CONFIG_USER_ONLY)
20598 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
20599 # ifdef TARGET_MIPS64
20600 /* Enable 64-bit register mode. */
20601 env
->CP0_Status
|= (1 << CP0St_PX
);
20603 # ifdef TARGET_ABI_MIPSN64
20604 /* Enable 64-bit address mode. */
20605 env
->CP0_Status
|= (1 << CP0St_UX
);
20607 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
20608 hardware registers. */
20609 env
->CP0_HWREna
|= 0x0000000F;
20610 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
20611 env
->CP0_Status
|= (1 << CP0St_CU1
);
20613 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
20614 env
->CP0_Status
|= (1 << CP0St_MX
);
20616 # if defined(TARGET_MIPS64)
20617 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
20618 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
20619 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
20620 env
->CP0_Status
|= (1 << CP0St_FR
);
20624 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
20625 /* If the exception was raised from a delay slot,
20626 come back to the jump. */
20627 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
20628 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
20630 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
20632 env
->active_tc
.PC
= env
->exception_base
;
20633 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
20634 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
20635 env
->CP0_Wired
= 0;
20636 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
20637 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
20638 if (mips_um_ksegs_enabled()) {
20639 env
->CP0_EBase
|= 0x40000000;
20641 env
->CP0_EBase
|= (int32_t)0x80000000;
20643 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
20644 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
20646 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
20648 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
20649 /* vectored interrupts not implemented, timer on int 7,
20650 no performance counters. */
20651 env
->CP0_IntCtl
= 0xe0000000;
20655 for (i
= 0; i
< 7; i
++) {
20656 env
->CP0_WatchLo
[i
] = 0;
20657 env
->CP0_WatchHi
[i
] = 0x80000000;
20659 env
->CP0_WatchLo
[7] = 0;
20660 env
->CP0_WatchHi
[7] = 0;
20662 /* Count register increments in debug mode, EJTAG version 1 */
20663 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20665 cpu_mips_store_count(env
, 1);
20667 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20670 /* Only TC0 on VPE 0 starts as active. */
20671 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20672 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20673 env
->tcs
[i
].CP0_TCHalt
= 1;
20675 env
->active_tc
.CP0_TCHalt
= 1;
20678 if (cs
->cpu_index
== 0) {
20679 /* VPE0 starts up enabled. */
20680 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20681 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20683 /* TC0 starts up unhalted. */
20685 env
->active_tc
.CP0_TCHalt
= 0;
20686 env
->tcs
[0].CP0_TCHalt
= 0;
20687 /* With thread 0 active. */
20688 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20689 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20694 * Configure default legacy segmentation control. We use this regardless of
20695 * whether segmentation control is presented to the guest.
20697 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
20698 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
20699 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
20700 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
20701 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
20702 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
20704 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
20705 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
20706 (3 << CP0SC_C
)) << 16;
20707 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
20708 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
20709 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
20710 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
20711 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
20712 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
20713 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
20714 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
20716 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20717 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20718 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20719 env
->CP0_Status
|= (1 << CP0St_FR
);
20723 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20727 compute_hflags(env
);
20728 restore_fp_status(env
);
20729 restore_pamask(env
);
20730 cs
->exception_index
= EXCP_NONE
;
20732 if (semihosting_get_argc()) {
20733 /* UHI interface can be used to obtain argc and argv */
20734 env
->active_tc
.gpr
[4] = -1;
20738 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20739 target_ulong
*data
)
20741 env
->active_tc
.PC
= data
[0];
20742 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20743 env
->hflags
|= data
[1];
20744 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20745 case MIPS_HFLAG_BR
:
20747 case MIPS_HFLAG_BC
:
20748 case MIPS_HFLAG_BL
:
20750 env
->btarget
= data
[2];