2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
26 #include "disas/disas.h"
27 #include "exec/exec-all.h"
29 #include "exec/cpu_ldst.h"
31 #include "exec/helper-proto.h"
32 #include "exec/helper-gen.h"
33 #include "sysemu/kvm.h"
34 #include "exec/semihost.h"
36 #include "target/mips/trace.h"
37 #include "trace-tcg.h"
40 #define MIPS_DEBUG_DISAS 0
42 /* MIPS major opcodes */
/*
 * Extract the 6-bit major opcode field (instruction bits 31..26).
 * The argument is parenthesized so low-precedence expressions such as
 * MASK_OP_MAJOR(a | b) expand correctly, and the mask constant is
 * unsigned to avoid left-shifting into the sign bit of int (UB).
 */
#define MASK_OP_MAJOR(op) ((op) & (0x3FU << 26))
46 /* indirect opcode tables */
47 OPC_SPECIAL
= (0x00 << 26),
48 OPC_REGIMM
= (0x01 << 26),
49 OPC_CP0
= (0x10 << 26),
50 OPC_CP1
= (0x11 << 26),
51 OPC_CP2
= (0x12 << 26),
52 OPC_CP3
= (0x13 << 26),
53 OPC_SPECIAL2
= (0x1C << 26),
54 OPC_SPECIAL3
= (0x1F << 26),
55 /* arithmetic with immediate */
56 OPC_ADDI
= (0x08 << 26),
57 OPC_ADDIU
= (0x09 << 26),
58 OPC_SLTI
= (0x0A << 26),
59 OPC_SLTIU
= (0x0B << 26),
60 /* logic with immediate */
61 OPC_ANDI
= (0x0C << 26),
62 OPC_ORI
= (0x0D << 26),
63 OPC_XORI
= (0x0E << 26),
64 OPC_LUI
= (0x0F << 26),
/* doubleword (64-bit) arithmetic with immediate */
66 OPC_DADDI
= (0x18 << 26),
67 OPC_DADDIU
= (0x19 << 26),
68 /* Jump and branches */
70 OPC_JAL
= (0x03 << 26),
71 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
72 OPC_BEQL
= (0x14 << 26),
73 OPC_BNE
= (0x05 << 26),
74 OPC_BNEL
= (0x15 << 26),
75 OPC_BLEZ
= (0x06 << 26),
76 OPC_BLEZL
= (0x16 << 26),
77 OPC_BGTZ
= (0x07 << 26),
78 OPC_BGTZL
= (0x17 << 26),
79 OPC_JALX
= (0x1D << 26),
80 OPC_DAUI
= (0x1D << 26),
82 OPC_LDL
= (0x1A << 26),
83 OPC_LDR
= (0x1B << 26),
84 OPC_LB
= (0x20 << 26),
85 OPC_LH
= (0x21 << 26),
86 OPC_LWL
= (0x22 << 26),
87 OPC_LW
= (0x23 << 26),
88 OPC_LWPC
= OPC_LW
| 0x5,
89 OPC_LBU
= (0x24 << 26),
90 OPC_LHU
= (0x25 << 26),
91 OPC_LWR
= (0x26 << 26),
92 OPC_LWU
= (0x27 << 26),
93 OPC_SB
= (0x28 << 26),
94 OPC_SH
= (0x29 << 26),
95 OPC_SWL
= (0x2A << 26),
96 OPC_SW
= (0x2B << 26),
97 OPC_SDL
= (0x2C << 26),
98 OPC_SDR
= (0x2D << 26),
99 OPC_SWR
= (0x2E << 26),
100 OPC_LL
= (0x30 << 26),
101 OPC_LLD
= (0x34 << 26),
102 OPC_LD
= (0x37 << 26),
103 OPC_LDPC
= OPC_LD
| 0x5,
104 OPC_SC
= (0x38 << 26),
105 OPC_SCD
= (0x3C << 26),
106 OPC_SD
= (0x3F << 26),
107 /* Floating point load/store */
108 OPC_LWC1
= (0x31 << 26),
109 OPC_LWC2
= (0x32 << 26),
110 OPC_LDC1
= (0x35 << 26),
111 OPC_LDC2
= (0x36 << 26),
112 OPC_SWC1
= (0x39 << 26),
113 OPC_SWC2
= (0x3A << 26),
114 OPC_SDC1
= (0x3D << 26),
115 OPC_SDC2
= (0x3E << 26),
116 /* Compact Branches */
117 OPC_BLEZALC
= (0x06 << 26),
118 OPC_BGEZALC
= (0x06 << 26),
119 OPC_BGEUC
= (0x06 << 26),
120 OPC_BGTZALC
= (0x07 << 26),
121 OPC_BLTZALC
= (0x07 << 26),
122 OPC_BLTUC
= (0x07 << 26),
123 OPC_BOVC
= (0x08 << 26),
124 OPC_BEQZALC
= (0x08 << 26),
125 OPC_BEQC
= (0x08 << 26),
126 OPC_BLEZC
= (0x16 << 26),
127 OPC_BGEZC
= (0x16 << 26),
128 OPC_BGEC
= (0x16 << 26),
129 OPC_BGTZC
= (0x17 << 26),
130 OPC_BLTZC
= (0x17 << 26),
131 OPC_BLTC
= (0x17 << 26),
132 OPC_BNVC
= (0x18 << 26),
133 OPC_BNEZALC
= (0x18 << 26),
134 OPC_BNEC
= (0x18 << 26),
135 OPC_BC
= (0x32 << 26),
136 OPC_BEQZC
= (0x36 << 26),
137 OPC_JIC
= (0x36 << 26),
138 OPC_BALC
= (0x3A << 26),
139 OPC_BNEZC
= (0x3E << 26),
140 OPC_JIALC
= (0x3E << 26),
141 /* MDMX ASE specific */
142 OPC_MDMX
= (0x1E << 26),
143 /* MSA ASE, same as MDMX */
145 /* Cache and prefetch */
146 OPC_CACHE
= (0x2F << 26),
147 OPC_PREF
= (0x33 << 26),
148 /* PC-relative address computation / loads */
149 OPC_PCREL
= (0x3B << 26),
152 /* PC-relative address computation / loads */
/* Major opcode plus bits 20..19 (selects the 2-bit PC-relative sub-op). */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
/* Major opcode plus bits 20..16 (selects the 5-bit PC-relative sub-op). */
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
156 /* Instructions determined by bits 19 and 20 */
157 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
158 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
159 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
161 /* Instructions determined by bits 16 ... 20 */
162 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
163 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
166 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
169 /* MIPS special opcodes */
/*
 * Major opcode plus the 6-bit function field (instruction bits 5..0).
 * Fully parenthesized so the expansion is safe inside larger expressions,
 * matching the other MASK_* helpers in this file.
 */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
174 OPC_SLL
= 0x00 | OPC_SPECIAL
,
175 /* NOP is SLL r0, r0, 0 */
176 /* SSNOP is SLL r0, r0, 1 */
177 /* EHB is SLL r0, r0, 3 */
178 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
179 OPC_ROTR
= OPC_SRL
| (1 << 21),
180 OPC_SRA
= 0x03 | OPC_SPECIAL
,
181 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
182 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
183 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
184 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
185 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
186 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
187 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
188 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
189 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
190 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
191 OPC_DROTR
= OPC_DSRL
| (1 << 21),
192 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
193 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
194 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
195 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
196 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
197 /* Multiplication / division */
198 OPC_MULT
= 0x18 | OPC_SPECIAL
,
199 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
200 OPC_DIV
= 0x1A | OPC_SPECIAL
,
201 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
202 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
203 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
204 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
205 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
207 /* 2 registers arithmetic / logic */
208 OPC_ADD
= 0x20 | OPC_SPECIAL
,
209 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
210 OPC_SUB
= 0x22 | OPC_SPECIAL
,
211 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
212 OPC_AND
= 0x24 | OPC_SPECIAL
,
213 OPC_OR
= 0x25 | OPC_SPECIAL
,
214 OPC_XOR
= 0x26 | OPC_SPECIAL
,
215 OPC_NOR
= 0x27 | OPC_SPECIAL
,
216 OPC_SLT
= 0x2A | OPC_SPECIAL
,
217 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
218 OPC_DADD
= 0x2C | OPC_SPECIAL
,
219 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
220 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
221 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
223 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
224 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
226 OPC_TGE
= 0x30 | OPC_SPECIAL
,
227 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
228 OPC_TLT
= 0x32 | OPC_SPECIAL
,
229 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
230 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
231 OPC_TNE
= 0x36 | OPC_SPECIAL
,
232 /* HI / LO registers load & stores */
233 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
234 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
235 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
236 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
237 /* Conditional moves */
238 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
239 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
241 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
242 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
244 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
247 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
248 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
249 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
250 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
251 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
253 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
254 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
255 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
256 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
259 /* R6 Multiply and Divide instructions have the same Opcode
260 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
261 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
264 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
265 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
266 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
267 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
268 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
269 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
270 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
271 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
273 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
274 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
275 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
276 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
277 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
278 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
279 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
280 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
282 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
283 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
284 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
285 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
286 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
288 OPC_LSA
= 0x05 | OPC_SPECIAL
,
289 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
292 /* Multiplication variants of the vr54xx. */
/*
 * VR54xx multiply variants: SPECIAL encoding plus the 5-bit sa field
 * (bits 10..6).  Fully parenthesized, matching the other MASK_* helpers.
 */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
296 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
297 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
298 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
299 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
301 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
303 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
305 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
306 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
307 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
308 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
309 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
312 /* REGIMM (rt field) opcodes */
/*
 * REGIMM sub-opcodes are selected by the rt field (bits 20..16).
 * Fully parenthesized, matching the other MASK_* helpers in this file.
 */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
316 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
317 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
318 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
319 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
320 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
321 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
322 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
323 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
324 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
325 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
326 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
327 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
328 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
329 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
330 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
331 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
333 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
334 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
337 /* Special2 opcodes */
/*
 * SPECIAL2 encoding: major opcode plus the 6-bit function field.
 * Fully parenthesized, matching the other MASK_* helpers in this file.
 */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
341 /* Multiply & xxx operations */
342 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
343 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
344 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
345 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
346 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
348 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
349 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
350 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
351 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
352 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
353 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
354 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
355 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
356 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
357 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
358 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
359 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
361 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
362 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
363 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
364 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
366 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
369 /* Special3 opcodes */
/*
 * SPECIAL3 encoding: major opcode plus the 6-bit function field.
 * Fully parenthesized, matching the other MASK_* helpers in this file.
 */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
373 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
374 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
375 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
376 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
377 OPC_INS
= 0x04 | OPC_SPECIAL3
,
378 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
379 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
380 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
381 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
382 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
383 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
384 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
385 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
388 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
389 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
390 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
391 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
392 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
393 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
394 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
395 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
396 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
397 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
398 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
399 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
402 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
403 /* MIPS DSP Arithmetic */
404 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
405 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
406 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
407 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
408 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
409 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
410 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
411 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
412 /* MIPS DSP GPR-Based Shift Sub-class */
413 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
414 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
415 /* MIPS DSP Multiply Sub-class insns */
416 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
417 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
418 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
419 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
420 /* DSP Bit/Manipulation Sub-class */
421 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
422 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
423 /* MIPS DSP Append Sub-class */
424 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
425 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
426 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
427 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
428 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
431 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
432 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
433 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
434 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
435 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
436 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/*
 * BSHFL sub-opcodes are selected by the sa field (bits 10..6) on top of
 * the SPECIAL3 encoding.  Fully parenthesized, matching the other
 * MASK_* helpers in this file.
 */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
443 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
444 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
445 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
446 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
447 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
448 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/*
 * DBSHFL sub-opcodes are selected by the sa field (bits 10..6) on top of
 * the SPECIAL3 encoding.  Fully parenthesized, matching the other
 * MASK_* helpers in this file.
 */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
455 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
456 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
457 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
458 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
459 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
462 /* MIPS DSP REGIMM opcodes */
464 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
465 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
468 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
471 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
472 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
473 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
474 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
477 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
479 /* MIPS DSP Arithmetic Sub-class */
480 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
481 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
482 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
483 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
484 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
485 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
486 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
487 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
488 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
489 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
490 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
491 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
492 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
493 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
494 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
495 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
496 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
497 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
498 /* MIPS DSP Multiply Sub-class insns */
499 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
500 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
501 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
502 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
503 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
504 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
507 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
508 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
510 /* MIPS DSP Arithmetic Sub-class */
511 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
512 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
513 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
514 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
515 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
516 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
517 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
518 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
519 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
520 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
521 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
522 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
523 /* MIPS DSP Multiply Sub-class insns */
524 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
525 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
526 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
527 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
530 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
532 /* MIPS DSP Arithmetic Sub-class */
533 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
534 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
535 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
536 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
537 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
538 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
539 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
540 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
541 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
542 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
543 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
544 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
545 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
546 /* DSP Bit/Manipulation Sub-class */
547 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
548 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
549 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
550 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
551 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
554 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
556 /* MIPS DSP Arithmetic Sub-class */
557 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
558 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
559 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
560 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
561 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
562 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
563 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
564 /* DSP Compare-Pick Sub-class */
565 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
566 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
567 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
568 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
569 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
570 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
571 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
572 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
573 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
574 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
575 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
576 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
577 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
578 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
579 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
582 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
584 /* MIPS DSP GPR-Based Shift Sub-class */
585 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
586 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
587 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
588 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
589 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
590 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
591 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
592 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
593 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
594 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
595 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
596 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
597 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
598 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
599 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
600 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
601 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
602 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
603 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
604 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
605 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
606 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
609 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
611 /* MIPS DSP Multiply Sub-class insns */
612 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
613 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
614 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
615 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
616 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
617 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
618 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
619 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
620 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
621 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
622 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
623 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
624 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
625 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
626 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
627 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
628 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
629 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
630 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
631 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
632 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
633 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
636 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
638 /* DSP Bit/Manipulation Sub-class */
639 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
642 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
644 /* MIPS DSP Append Sub-class */
645 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
646 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
647 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
650 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
652 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
653 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
654 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
655 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
656 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
657 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
658 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
659 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
660 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
661 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
662 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
663 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
664 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
665 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
666 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
667 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
668 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
669 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
672 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
674 /* MIPS DSP Arithmetic Sub-class */
675 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
676 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
677 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
678 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
679 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
680 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
681 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
682 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
683 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
684 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
685 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
686 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
687 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
688 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
689 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
690 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
691 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
692 /* DSP Bit/Manipulation Sub-class */
693 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
694 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
695 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
697 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
698 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
701 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
703 /* MIPS DSP Multiply Sub-class insns */
704 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
705 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
706 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
707 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
708 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
709 /* MIPS DSP Arithmetic Sub-class */
710 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
711 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
712 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
713 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
714 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
715 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
716 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
717 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
718 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
719 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
720 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
721 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
722 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
723 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
724 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
725 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
726 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
727 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
729 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
730 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
733 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
735 /* DSP Compare-Pick Sub-class */
736 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
737 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
738 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
739 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
740 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
741 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
742 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
743 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
744 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
745 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
746 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
747 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
748 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
749 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
750 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
751 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
752 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
753 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
754 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
755 /* MIPS DSP Arithmetic Sub-class */
756 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
762 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
763 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
766 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
768 /* DSP Append Sub-class */
769 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
770 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
771 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
772 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
775 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
777 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
778 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
779 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
780 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
781 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
782 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
783 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
784 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
785 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
786 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
787 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
788 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
789 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
790 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
791 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
792 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
793 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
794 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
795 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
796 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
797 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
798 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
801 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
803 /* DSP Bit/Manipulation Sub-class */
804 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
807 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
809 /* MIPS DSP Multiply Sub-class insns */
810 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
811 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
812 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
813 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
814 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
815 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
816 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
817 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
818 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
819 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
820 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
821 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
822 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
823 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
824 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
825 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
826 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
827 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
828 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
834 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
835 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
838 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
840 /* MIPS DSP GPR-Based Shift Sub-class */
841 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
842 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
843 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
844 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
845 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
846 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
847 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
848 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
849 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
850 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
851 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
852 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
853 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
854 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
855 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
856 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
857 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
858 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
859 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
865 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
866 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
869 /* Coprocessor 0 (rs field) */
870 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
873 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
874 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
875 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
876 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
877 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
878 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
879 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
880 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
881 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
882 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
883 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
884 OPC_C0
= (0x10 << 21) | OPC_CP0
,
885 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
886 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
890 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
893 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
894 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
895 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
896 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
897 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
898 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
899 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
900 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
903 /* Coprocessor 0 (with rs == C0) */
904 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
907 OPC_TLBR
= 0x01 | OPC_C0
,
908 OPC_TLBWI
= 0x02 | OPC_C0
,
909 OPC_TLBINV
= 0x03 | OPC_C0
,
910 OPC_TLBINVF
= 0x04 | OPC_C0
,
911 OPC_TLBWR
= 0x06 | OPC_C0
,
912 OPC_TLBP
= 0x08 | OPC_C0
,
913 OPC_RFE
= 0x10 | OPC_C0
,
914 OPC_ERET
= 0x18 | OPC_C0
,
915 OPC_DERET
= 0x1F | OPC_C0
,
916 OPC_WAIT
= 0x20 | OPC_C0
,
919 /* Coprocessor 1 (rs field) */
920 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
922 /* Values for the fmt field in FP instructions */
924 /* 0 - 15 are reserved */
925 FMT_S
= 16, /* single fp */
926 FMT_D
= 17, /* double fp */
927 FMT_E
= 18, /* extended fp */
928 FMT_Q
= 19, /* quad fp */
929 FMT_W
= 20, /* 32-bit fixed */
930 FMT_L
= 21, /* 64-bit fixed */
931 FMT_PS
= 22, /* paired single fp */
932 /* 23 - 31 are reserved */
936 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
937 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
938 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
939 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
940 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
941 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
942 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
943 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
944 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
945 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
946 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
947 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
948 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
949 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
950 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
951 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
952 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
953 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
954 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
955 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
956 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
957 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
958 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
959 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
960 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
961 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
962 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
963 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
964 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
965 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
968 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
969 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
972 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
973 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
974 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
975 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
979 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
980 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
984 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
985 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
988 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
991 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
992 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
993 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
994 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
995 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
996 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
997 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
998 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
999 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1000 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1001 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1004 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1007 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1010 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1011 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1012 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1013 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1014 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1016 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1019 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1020 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1021 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1022 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1023 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1025 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1026 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1027 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1028 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1029 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1030 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1031 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1032 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1034 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1037 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1038 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1039 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1040 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1041 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1043 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1044 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1045 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1046 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1047 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1048 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1050 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1051 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1052 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1053 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1054 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1055 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1057 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1058 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1059 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1060 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1061 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1062 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1064 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1065 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1066 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1067 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1068 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1069 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1071 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1072 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1073 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1074 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1075 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1076 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1078 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1079 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1080 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1081 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1082 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1083 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1085 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1086 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1087 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1088 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1089 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1090 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1092 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1093 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1094 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1095 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1096 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1097 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1101 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1104 OPC_LWXC1
= 0x00 | OPC_CP3
,
1105 OPC_LDXC1
= 0x01 | OPC_CP3
,
1106 OPC_LUXC1
= 0x05 | OPC_CP3
,
1107 OPC_SWXC1
= 0x08 | OPC_CP3
,
1108 OPC_SDXC1
= 0x09 | OPC_CP3
,
1109 OPC_SUXC1
= 0x0D | OPC_CP3
,
1110 OPC_PREFX
= 0x0F | OPC_CP3
,
1111 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1112 OPC_MADD_S
= 0x20 | OPC_CP3
,
1113 OPC_MADD_D
= 0x21 | OPC_CP3
,
1114 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1115 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1116 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1117 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1118 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1119 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1120 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1121 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1122 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1123 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1127 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1129 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1130 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1131 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1132 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1133 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1134 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1135 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1136 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1137 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1138 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1139 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1140 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1141 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1142 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1143 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1144 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1145 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1146 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1147 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1148 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1149 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1151 /* MI10 instruction */
1152 OPC_LD_B
= (0x20) | OPC_MSA
,
1153 OPC_LD_H
= (0x21) | OPC_MSA
,
1154 OPC_LD_W
= (0x22) | OPC_MSA
,
1155 OPC_LD_D
= (0x23) | OPC_MSA
,
1156 OPC_ST_B
= (0x24) | OPC_MSA
,
1157 OPC_ST_H
= (0x25) | OPC_MSA
,
1158 OPC_ST_W
= (0x26) | OPC_MSA
,
1159 OPC_ST_D
= (0x27) | OPC_MSA
,
1163 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1164 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1165 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1166 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1167 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1168 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1169 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1170 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1171 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1172 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1173 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1174 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1175 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1177 /* I8 instruction */
1178 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1179 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1180 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1181 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1182 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1183 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1184 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1185 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1186 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1187 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1189 /* VEC/2R/2RF instruction */
1190 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1191 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1192 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1193 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1194 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1195 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1196 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1198 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1199 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1201 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1202 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1203 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1204 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1205 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1207 /* 2RF instruction df(bit 16) = _w, _d */
1208 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1209 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1210 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1211 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1212 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1213 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1214 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1215 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1216 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1217 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1218 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1219 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1220 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1221 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1222 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1223 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1225 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1226 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1227 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1228 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1229 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1230 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1231 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1232 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1233 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1234 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1235 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1236 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1237 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1238 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1239 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1240 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1241 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1242 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1243 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1244 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1245 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1246 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1247 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1248 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1249 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1250 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1251 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1252 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1253 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1254 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1255 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1256 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1257 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1258 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1259 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1260 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1261 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1262 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1263 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1264 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1265 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1266 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1267 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1268 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1269 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1270 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1271 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1272 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1273 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1274 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1275 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1276 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1277 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1278 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1279 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1280 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1281 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1282 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1283 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1284 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1285 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1286 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1287 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1288 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1290 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1291 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1292 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1293 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1294 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1295 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1296 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1297 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1298 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1299 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1301 /* 3RF instruction _df(bit 21) = _w, _d */
1302 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1303 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1304 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1305 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1306 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1307 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1308 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1309 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1310 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1311 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1312 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1313 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1314 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1315 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1316 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1317 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1318 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1319 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1320 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1321 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1322 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1323 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1324 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1325 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1326 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1327 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1328 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1329 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1330 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1331 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1332 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1333 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1334 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1335 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1336 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1337 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1338 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1339 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1340 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1341 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1342 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1344 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1345 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1346 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1347 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1348 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1349 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1350 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1351 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1352 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1353 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1354 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1355 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1356 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1359 /* global register indices */
1360 static TCGv_env cpu_env
;
1361 static TCGv cpu_gpr
[32], cpu_PC
;
1362 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1363 static TCGv cpu_dspctrl
, btarget
, bcond
;
1364 static TCGv_i32 hflags
;
1365 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1366 static TCGv_i64 fpu_f64
[32];
1367 static TCGv_i64 msa_wr_d
[64];
1369 #include "exec/gen-icount.h"
/* Helper-call wrappers: gen_helper_NeMi calls helper `name` with N TCG
   return values and M TCG arguments, plus a trailing immediate that is
   materialized into a throwaway TCGv_i32 for the call. */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
1413 typedef struct DisasContext
{
1414 struct TranslationBlock
*tb
;
1415 target_ulong pc
, saved_pc
;
1417 int singlestep_enabled
;
1419 int32_t CP0_Config1
;
1420 /* Routine used to access memory */
1422 TCGMemOp default_tcg_memop_mask
;
1423 uint32_t hflags
, saved_hflags
;
1425 target_ulong btarget
;
1434 int CP0_LLAddr_shift
;
/* Translation-block exit status, stored in DisasContext. */
enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
/* Register names used when creating the TCG globals above. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

/* Each MSA register is exposed as two 64-bit doubleword halves. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Disassembly tracing, compiled out unless MIPS_DEBUG_DISAS is set. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Trace an invalid/reserved encoding; `ctx` must be in scope at the use site. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
1509 /* General purpose registers moves. */
1510 static inline void gen_load_gpr (TCGv t
, int reg
)
1513 tcg_gen_movi_tl(t
, 0);
1515 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1518 static inline void gen_store_gpr (TCGv t
, int reg
)
1521 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1524 /* Moves to/from shadow registers. */
1525 static inline void gen_load_srsgpr (int from
, int to
)
1527 TCGv t0
= tcg_temp_new();
1530 tcg_gen_movi_tl(t0
, 0);
1532 TCGv_i32 t2
= tcg_temp_new_i32();
1533 TCGv_ptr addr
= tcg_temp_new_ptr();
1535 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1536 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1537 tcg_gen_andi_i32(t2
, t2
, 0xf);
1538 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1539 tcg_gen_ext_i32_ptr(addr
, t2
);
1540 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1542 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1543 tcg_temp_free_ptr(addr
);
1544 tcg_temp_free_i32(t2
);
1546 gen_store_gpr(t0
, to
);
1550 static inline void gen_store_srsgpr (int from
, int to
)
1553 TCGv t0
= tcg_temp_new();
1554 TCGv_i32 t2
= tcg_temp_new_i32();
1555 TCGv_ptr addr
= tcg_temp_new_ptr();
1557 gen_load_gpr(t0
, from
);
1558 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1559 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1560 tcg_gen_andi_i32(t2
, t2
, 0xf);
1561 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1562 tcg_gen_ext_i32_ptr(addr
, t2
);
1563 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1565 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1566 tcg_temp_free_ptr(addr
);
1567 tcg_temp_free_i32(t2
);
1573 static inline void gen_save_pc(target_ulong pc
)
1575 tcg_gen_movi_tl(cpu_PC
, pc
);
1578 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1580 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1581 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
1582 gen_save_pc(ctx
->pc
);
1583 ctx
->saved_pc
= ctx
->pc
;
1585 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1586 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1587 ctx
->saved_hflags
= ctx
->hflags
;
1588 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1594 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1600 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1602 ctx
->saved_hflags
= ctx
->hflags
;
1603 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1609 ctx
->btarget
= env
->btarget
;
1614 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1616 TCGv_i32 texcp
= tcg_const_i32(excp
);
1617 TCGv_i32 terr
= tcg_const_i32(err
);
1618 save_cpu_state(ctx
, 1);
1619 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1620 tcg_temp_free_i32(terr
);
1621 tcg_temp_free_i32(texcp
);
1622 ctx
->bstate
= BS_EXCP
;
1625 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1627 gen_helper_0e0i(raise_exception
, excp
);
1630 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1632 generate_exception_err(ctx
, excp
, 0);
1635 /* Floating point register moves. */
1636 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1638 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1639 generate_exception(ctx
, EXCP_RI
);
1641 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1644 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1647 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1648 generate_exception(ctx
, EXCP_RI
);
1650 t64
= tcg_temp_new_i64();
1651 tcg_gen_extu_i32_i64(t64
, t
);
1652 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1653 tcg_temp_free_i64(t64
);
1656 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1658 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1659 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1661 gen_load_fpr32(ctx
, t
, reg
| 1);
1665 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1667 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1668 TCGv_i64 t64
= tcg_temp_new_i64();
1669 tcg_gen_extu_i32_i64(t64
, t
);
1670 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1671 tcg_temp_free_i64(t64
);
1673 gen_store_fpr32(ctx
, t
, reg
| 1);
1677 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1679 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1680 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1682 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1686 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1688 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1689 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1692 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1693 t0
= tcg_temp_new_i64();
1694 tcg_gen_shri_i64(t0
, t
, 32);
1695 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1696 tcg_temp_free_i64(t0
);
1700 static inline int get_fp_bit (int cc
)
1708 /* Addresses computation */
1709 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1711 tcg_gen_add_tl(ret
, arg0
, arg1
);
1713 #if defined(TARGET_MIPS64)
1714 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1715 tcg_gen_ext32s_i64(ret
, ret
);
1720 /* Addresses computation (translation time) */
1721 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1724 target_long sum
= base
+ offset
;
1726 #if defined(TARGET_MIPS64)
1727 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1734 /* Sign-extract the low 32-bits to a target_long. */
1735 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1737 #if defined(TARGET_MIPS64)
1738 tcg_gen_ext32s_i64(ret
, arg
);
1740 tcg_gen_extrl_i64_i32(ret
, arg
);
1744 /* Sign-extract the high 32-bits to a target_long. */
1745 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1747 #if defined(TARGET_MIPS64)
1748 tcg_gen_sari_i64(ret
, arg
, 32);
1750 tcg_gen_extrh_i64_i32(ret
, arg
);
1754 static inline void check_cp0_enabled(DisasContext
*ctx
)
1756 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1757 generate_exception_err(ctx
, EXCP_CpU
, 0);
1760 static inline void check_cp1_enabled(DisasContext
*ctx
)
1762 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1763 generate_exception_err(ctx
, EXCP_CpU
, 1);
1766 /* Verify that the processor is running with COP1X instructions enabled.
1767 This is associated with the nabla symbol in the MIPS32 and MIPS64
1770 static inline void check_cop1x(DisasContext
*ctx
)
1772 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1773 generate_exception_end(ctx
, EXCP_RI
);
1776 /* Verify that the processor is running with 64-bit floating-point
1777 operations enabled. */
1779 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1781 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1782 generate_exception_end(ctx
, EXCP_RI
);
1786 * Verify if floating point register is valid; an operation is not defined
1787 * if bit 0 of any register specification is set and the FR bit in the
1788 * Status register equals zero, since the register numbers specify an
1789 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1790 * in the Status register equals one, both even and odd register numbers
1791 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1793 * Multiple 64 bit wide registers can be checked by calling
1794 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1796 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1798 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1799 generate_exception_end(ctx
, EXCP_RI
);
1802 /* Verify that the processor is running with DSP instructions enabled.
1803 This is enabled by CP0 Status register MX(24) bit.
1806 static inline void check_dsp(DisasContext
*ctx
)
1808 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1809 if (ctx
->insn_flags
& ASE_DSP
) {
1810 generate_exception_end(ctx
, EXCP_DSPDIS
);
1812 generate_exception_end(ctx
, EXCP_RI
);
1817 static inline void check_dspr2(DisasContext
*ctx
)
1819 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1820 if (ctx
->insn_flags
& ASE_DSP
) {
1821 generate_exception_end(ctx
, EXCP_DSPDIS
);
1823 generate_exception_end(ctx
, EXCP_RI
);
1828 /* This code generates a "reserved instruction" exception if the
1829 CPU does not support the instruction set corresponding to flags. */
1830 static inline void check_insn(DisasContext
*ctx
, int flags
)
1832 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1833 generate_exception_end(ctx
, EXCP_RI
);
1837 /* This code generates a "reserved instruction" exception if the
1838 CPU has corresponding flag set which indicates that the instruction
1839 has been removed. */
1840 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1842 if (unlikely(ctx
->insn_flags
& flags
)) {
1843 generate_exception_end(ctx
, EXCP_RI
);
1847 /* This code generates a "reserved instruction" exception if the
1848 CPU does not support 64-bit paired-single (PS) floating point data type */
1849 static inline void check_ps(DisasContext
*ctx
)
1851 if (unlikely(!ctx
->ps
)) {
1852 generate_exception(ctx
, EXCP_RI
);
1854 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif
1867 #ifndef CONFIG_USER_ONLY
1868 static inline void check_mvh(DisasContext
*ctx
)
1870 if (unlikely(!ctx
->mvh
)) {
1871 generate_exception(ctx
, EXCP_RI
);
1876 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1877 calling interface for 32 and 64-bit FPRs. No sense in changing
1878 all callers for gen_load_fpr32 when we need the CTX parameter for
1880 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1881 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1882 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1883 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1884 int ft, int fs, int cc) \
1886 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1887 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1896 check_cp1_registers(ctx, fs | ft); \
1904 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1905 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1907 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1908 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1909 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1910 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1911 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1912 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1913 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1914 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1915 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1916 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1917 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1918 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1919 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1920 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1921 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1922 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1925 tcg_temp_free_i##bits (fp0); \
1926 tcg_temp_free_i##bits (fp1); \
1929 FOP_CONDS(, 0, d
, FMT_D
, 64)
1930 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1931 FOP_CONDS(, 0, s
, FMT_S
, 32)
1932 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1933 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1934 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1937 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1938 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1939 int ft, int fs, int fd) \
1941 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1942 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1943 if (ifmt == FMT_D) { \
1944 check_cp1_registers(ctx, fs | ft | fd); \
1946 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1947 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1950 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1953 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1956 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1959 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1962 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1965 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1968 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1971 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1974 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1977 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1980 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1983 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1986 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1989 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1992 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1995 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1998 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2001 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2004 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2007 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2010 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2013 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2019 tcg_temp_free_i ## bits (fp0); \
2020 tcg_temp_free_i ## bits (fp1); \
2023 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2024 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2026 #undef gen_ldcmp_fpr32
2027 #undef gen_ldcmp_fpr64
2029 /* load/store instructions. */
2030 #ifdef CONFIG_USER_ONLY
2031 #define OP_LD_ATOMIC(insn,fname) \
2032 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2034 TCGv t0 = tcg_temp_new(); \
2035 tcg_gen_mov_tl(t0, arg1); \
2036 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2037 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2038 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2039 tcg_temp_free(t0); \
2042 #define OP_LD_ATOMIC(insn,fname) \
2043 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2045 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
2048 OP_LD_ATOMIC(ll
,ld32s
);
2049 #if defined(TARGET_MIPS64)
2050 OP_LD_ATOMIC(lld
,ld64
);
2054 #ifdef CONFIG_USER_ONLY
2055 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2056 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2058 TCGv t0 = tcg_temp_new(); \
2059 TCGLabel *l1 = gen_new_label(); \
2060 TCGLabel *l2 = gen_new_label(); \
2062 tcg_gen_andi_tl(t0, arg2, almask); \
2063 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2064 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2065 generate_exception(ctx, EXCP_AdES); \
2066 gen_set_label(l1); \
2067 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2068 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2069 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2070 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2071 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2072 generate_exception_end(ctx, EXCP_SC); \
2073 gen_set_label(l2); \
2074 tcg_gen_movi_tl(t0, 0); \
2075 gen_store_gpr(t0, rt); \
2076 tcg_temp_free(t0); \
2079 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2080 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2082 TCGv t0 = tcg_temp_new(); \
2083 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
2084 gen_store_gpr(t0, rt); \
2085 tcg_temp_free(t0); \
2088 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2089 #if defined(TARGET_MIPS64)
2090 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2094 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2095 int base
, int16_t offset
)
2098 tcg_gen_movi_tl(addr
, offset
);
2099 } else if (offset
== 0) {
2100 gen_load_gpr(addr
, base
);
2102 tcg_gen_movi_tl(addr
, offset
);
2103 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2107 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2109 target_ulong pc
= ctx
->pc
;
2111 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2112 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2117 pc
&= ~(target_ulong
)3;
2122 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2123 int rt
, int base
, int16_t offset
)
2127 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2128 /* Loongson CPU uses a load to zero register for prefetch.
2129 We emulate it as a NOP. On other CPU we must perform the
2130 actual memory access. */
2134 t0
= tcg_temp_new();
2135 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2138 #if defined(TARGET_MIPS64)
2140 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2141 ctx
->default_tcg_memop_mask
);
2142 gen_store_gpr(t0
, rt
);
2145 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2146 ctx
->default_tcg_memop_mask
);
2147 gen_store_gpr(t0
, rt
);
2151 op_ld_lld(t0
, t0
, ctx
);
2152 gen_store_gpr(t0
, rt
);
2155 t1
= tcg_temp_new();
2156 /* Do a byte access to possibly trigger a page
2157 fault with the unaligned address. */
2158 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2159 tcg_gen_andi_tl(t1
, t0
, 7);
2160 #ifndef TARGET_WORDS_BIGENDIAN
2161 tcg_gen_xori_tl(t1
, t1
, 7);
2163 tcg_gen_shli_tl(t1
, t1
, 3);
2164 tcg_gen_andi_tl(t0
, t0
, ~7);
2165 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2166 tcg_gen_shl_tl(t0
, t0
, t1
);
2167 t2
= tcg_const_tl(-1);
2168 tcg_gen_shl_tl(t2
, t2
, t1
);
2169 gen_load_gpr(t1
, rt
);
2170 tcg_gen_andc_tl(t1
, t1
, t2
);
2172 tcg_gen_or_tl(t0
, t0
, t1
);
2174 gen_store_gpr(t0
, rt
);
2177 t1
= tcg_temp_new();
2178 /* Do a byte access to possibly trigger a page
2179 fault with the unaligned address. */
2180 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2181 tcg_gen_andi_tl(t1
, t0
, 7);
2182 #ifdef TARGET_WORDS_BIGENDIAN
2183 tcg_gen_xori_tl(t1
, t1
, 7);
2185 tcg_gen_shli_tl(t1
, t1
, 3);
2186 tcg_gen_andi_tl(t0
, t0
, ~7);
2187 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2188 tcg_gen_shr_tl(t0
, t0
, t1
);
2189 tcg_gen_xori_tl(t1
, t1
, 63);
2190 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2191 tcg_gen_shl_tl(t2
, t2
, t1
);
2192 gen_load_gpr(t1
, rt
);
2193 tcg_gen_and_tl(t1
, t1
, t2
);
2195 tcg_gen_or_tl(t0
, t0
, t1
);
2197 gen_store_gpr(t0
, rt
);
2200 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2201 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2203 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2204 gen_store_gpr(t0
, rt
);
2208 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2209 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2211 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2212 gen_store_gpr(t0
, rt
);
2215 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2216 ctx
->default_tcg_memop_mask
);
2217 gen_store_gpr(t0
, rt
);
2220 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2221 ctx
->default_tcg_memop_mask
);
2222 gen_store_gpr(t0
, rt
);
2225 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2226 ctx
->default_tcg_memop_mask
);
2227 gen_store_gpr(t0
, rt
);
2230 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2231 gen_store_gpr(t0
, rt
);
2234 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2235 gen_store_gpr(t0
, rt
);
2238 t1
= tcg_temp_new();
2239 /* Do a byte access to possibly trigger a page
2240 fault with the unaligned address. */
2241 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2242 tcg_gen_andi_tl(t1
, t0
, 3);
2243 #ifndef TARGET_WORDS_BIGENDIAN
2244 tcg_gen_xori_tl(t1
, t1
, 3);
2246 tcg_gen_shli_tl(t1
, t1
, 3);
2247 tcg_gen_andi_tl(t0
, t0
, ~3);
2248 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2249 tcg_gen_shl_tl(t0
, t0
, t1
);
2250 t2
= tcg_const_tl(-1);
2251 tcg_gen_shl_tl(t2
, t2
, t1
);
2252 gen_load_gpr(t1
, rt
);
2253 tcg_gen_andc_tl(t1
, t1
, t2
);
2255 tcg_gen_or_tl(t0
, t0
, t1
);
2257 tcg_gen_ext32s_tl(t0
, t0
);
2258 gen_store_gpr(t0
, rt
);
2261 t1
= tcg_temp_new();
2262 /* Do a byte access to possibly trigger a page
2263 fault with the unaligned address. */
2264 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2265 tcg_gen_andi_tl(t1
, t0
, 3);
2266 #ifdef TARGET_WORDS_BIGENDIAN
2267 tcg_gen_xori_tl(t1
, t1
, 3);
2269 tcg_gen_shli_tl(t1
, t1
, 3);
2270 tcg_gen_andi_tl(t0
, t0
, ~3);
2271 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2272 tcg_gen_shr_tl(t0
, t0
, t1
);
2273 tcg_gen_xori_tl(t1
, t1
, 31);
2274 t2
= tcg_const_tl(0xfffffffeull
);
2275 tcg_gen_shl_tl(t2
, t2
, t1
);
2276 gen_load_gpr(t1
, rt
);
2277 tcg_gen_and_tl(t1
, t1
, t2
);
2279 tcg_gen_or_tl(t0
, t0
, t1
);
2281 tcg_gen_ext32s_tl(t0
, t0
);
2282 gen_store_gpr(t0
, rt
);
2286 op_ld_ll(t0
, t0
, ctx
);
2287 gen_store_gpr(t0
, rt
);
2294 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2295 int base
, int16_t offset
)
2297 TCGv t0
= tcg_temp_new();
2298 TCGv t1
= tcg_temp_new();
2300 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2301 gen_load_gpr(t1
, rt
);
2303 #if defined(TARGET_MIPS64)
2305 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2306 ctx
->default_tcg_memop_mask
);
2309 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2312 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2316 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2317 ctx
->default_tcg_memop_mask
);
2320 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2321 ctx
->default_tcg_memop_mask
);
2324 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2327 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2330 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2338 /* Store conditional */
2339 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2340 int base
, int16_t offset
)
2344 #ifdef CONFIG_USER_ONLY
2345 t0
= tcg_temp_local_new();
2346 t1
= tcg_temp_local_new();
2348 t0
= tcg_temp_new();
2349 t1
= tcg_temp_new();
2351 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2352 gen_load_gpr(t1
, rt
);
2354 #if defined(TARGET_MIPS64)
2357 op_st_scd(t1
, t0
, rt
, ctx
);
2362 op_st_sc(t1
, t0
, rt
, ctx
);
2369 /* Load and store */
2370 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2371 int base
, int16_t offset
)
2373 TCGv t0
= tcg_temp_new();
2375 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2376 /* Don't do NOP if destination is zero: we must perform the actual
2381 TCGv_i32 fp0
= tcg_temp_new_i32();
2382 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2383 ctx
->default_tcg_memop_mask
);
2384 gen_store_fpr32(ctx
, fp0
, ft
);
2385 tcg_temp_free_i32(fp0
);
2390 TCGv_i32 fp0
= tcg_temp_new_i32();
2391 gen_load_fpr32(ctx
, fp0
, ft
);
2392 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2393 ctx
->default_tcg_memop_mask
);
2394 tcg_temp_free_i32(fp0
);
2399 TCGv_i64 fp0
= tcg_temp_new_i64();
2400 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2401 ctx
->default_tcg_memop_mask
);
2402 gen_store_fpr64(ctx
, fp0
, ft
);
2403 tcg_temp_free_i64(fp0
);
2408 TCGv_i64 fp0
= tcg_temp_new_i64();
2409 gen_load_fpr64(ctx
, fp0
, ft
);
2410 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2411 ctx
->default_tcg_memop_mask
);
2412 tcg_temp_free_i64(fp0
);
2416 MIPS_INVAL("flt_ldst");
2417 generate_exception_end(ctx
, EXCP_RI
);
2424 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2425 int rs
, int16_t imm
)
2427 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2428 check_cp1_enabled(ctx
);
2432 check_insn(ctx
, ISA_MIPS2
);
2435 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2438 generate_exception_err(ctx
, EXCP_CpU
, 1);
2442 /* Arithmetic with immediate operand */
2443 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2444 int rt
, int rs
, int16_t imm
)
2446 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2448 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2449 /* If no destination, treat it as a NOP.
2450 For addi, we must generate the overflow exception when needed. */
2456 TCGv t0
= tcg_temp_local_new();
2457 TCGv t1
= tcg_temp_new();
2458 TCGv t2
= tcg_temp_new();
2459 TCGLabel
*l1
= gen_new_label();
2461 gen_load_gpr(t1
, rs
);
2462 tcg_gen_addi_tl(t0
, t1
, uimm
);
2463 tcg_gen_ext32s_tl(t0
, t0
);
2465 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2466 tcg_gen_xori_tl(t2
, t0
, uimm
);
2467 tcg_gen_and_tl(t1
, t1
, t2
);
2469 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2471 /* operands of same sign, result different sign */
2472 generate_exception(ctx
, EXCP_OVERFLOW
);
2474 tcg_gen_ext32s_tl(t0
, t0
);
2475 gen_store_gpr(t0
, rt
);
2481 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2482 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2484 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2487 #if defined(TARGET_MIPS64)
2490 TCGv t0
= tcg_temp_local_new();
2491 TCGv t1
= tcg_temp_new();
2492 TCGv t2
= tcg_temp_new();
2493 TCGLabel
*l1
= gen_new_label();
2495 gen_load_gpr(t1
, rs
);
2496 tcg_gen_addi_tl(t0
, t1
, uimm
);
2498 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2499 tcg_gen_xori_tl(t2
, t0
, uimm
);
2500 tcg_gen_and_tl(t1
, t1
, t2
);
2502 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2504 /* operands of same sign, result different sign */
2505 generate_exception(ctx
, EXCP_OVERFLOW
);
2507 gen_store_gpr(t0
, rt
);
2513 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2515 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2522 /* Logic with immediate operand */
2523 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2524 int rt
, int rs
, int16_t imm
)
2529 /* If no destination, treat it as a NOP. */
2532 uimm
= (uint16_t)imm
;
2535 if (likely(rs
!= 0))
2536 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2538 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2542 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2544 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2547 if (likely(rs
!= 0))
2548 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2550 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2553 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2555 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2556 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2558 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2567 /* Set on less than with immediate operand */
2568 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2569 int rt
, int rs
, int16_t imm
)
2571 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2575 /* If no destination, treat it as a NOP. */
2578 t0
= tcg_temp_new();
2579 gen_load_gpr(t0
, rs
);
2582 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2585 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2591 /* Shifts with immediate operand */
2592 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2593 int rt
, int rs
, int16_t imm
)
2595 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2599 /* If no destination, treat it as a NOP. */
2603 t0
= tcg_temp_new();
2604 gen_load_gpr(t0
, rs
);
2607 tcg_gen_shli_tl(t0
, t0
, uimm
);
2608 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2611 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2615 tcg_gen_ext32u_tl(t0
, t0
);
2616 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2618 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2623 TCGv_i32 t1
= tcg_temp_new_i32();
2625 tcg_gen_trunc_tl_i32(t1
, t0
);
2626 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2627 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2628 tcg_temp_free_i32(t1
);
2630 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2633 #if defined(TARGET_MIPS64)
2635 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2638 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2641 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2645 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2647 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2651 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2654 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2657 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2660 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2668 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2669 int rd
, int rs
, int rt
)
2671 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2672 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2673 /* If no destination, treat it as a NOP.
2674 For add & sub, we must generate the overflow exception when needed. */
2681 TCGv t0
= tcg_temp_local_new();
2682 TCGv t1
= tcg_temp_new();
2683 TCGv t2
= tcg_temp_new();
2684 TCGLabel
*l1
= gen_new_label();
2686 gen_load_gpr(t1
, rs
);
2687 gen_load_gpr(t2
, rt
);
2688 tcg_gen_add_tl(t0
, t1
, t2
);
2689 tcg_gen_ext32s_tl(t0
, t0
);
2690 tcg_gen_xor_tl(t1
, t1
, t2
);
2691 tcg_gen_xor_tl(t2
, t0
, t2
);
2692 tcg_gen_andc_tl(t1
, t2
, t1
);
2694 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2696 /* operands of same sign, result different sign */
2697 generate_exception(ctx
, EXCP_OVERFLOW
);
2699 gen_store_gpr(t0
, rd
);
2704 if (rs
!= 0 && rt
!= 0) {
2705 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2706 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2707 } else if (rs
== 0 && rt
!= 0) {
2708 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2709 } else if (rs
!= 0 && rt
== 0) {
2710 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2712 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2717 TCGv t0
= tcg_temp_local_new();
2718 TCGv t1
= tcg_temp_new();
2719 TCGv t2
= tcg_temp_new();
2720 TCGLabel
*l1
= gen_new_label();
2722 gen_load_gpr(t1
, rs
);
2723 gen_load_gpr(t2
, rt
);
2724 tcg_gen_sub_tl(t0
, t1
, t2
);
2725 tcg_gen_ext32s_tl(t0
, t0
);
2726 tcg_gen_xor_tl(t2
, t1
, t2
);
2727 tcg_gen_xor_tl(t1
, t0
, t1
);
2728 tcg_gen_and_tl(t1
, t1
, t2
);
2730 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2732 /* operands of different sign, first operand and result different sign */
2733 generate_exception(ctx
, EXCP_OVERFLOW
);
2735 gen_store_gpr(t0
, rd
);
2740 if (rs
!= 0 && rt
!= 0) {
2741 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2742 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2743 } else if (rs
== 0 && rt
!= 0) {
2744 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2745 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2746 } else if (rs
!= 0 && rt
== 0) {
2747 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2749 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2752 #if defined(TARGET_MIPS64)
2755 TCGv t0
= tcg_temp_local_new();
2756 TCGv t1
= tcg_temp_new();
2757 TCGv t2
= tcg_temp_new();
2758 TCGLabel
*l1
= gen_new_label();
2760 gen_load_gpr(t1
, rs
);
2761 gen_load_gpr(t2
, rt
);
2762 tcg_gen_add_tl(t0
, t1
, t2
);
2763 tcg_gen_xor_tl(t1
, t1
, t2
);
2764 tcg_gen_xor_tl(t2
, t0
, t2
);
2765 tcg_gen_andc_tl(t1
, t2
, t1
);
2767 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2769 /* operands of same sign, result different sign */
2770 generate_exception(ctx
, EXCP_OVERFLOW
);
2772 gen_store_gpr(t0
, rd
);
2777 if (rs
!= 0 && rt
!= 0) {
2778 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2779 } else if (rs
== 0 && rt
!= 0) {
2780 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2781 } else if (rs
!= 0 && rt
== 0) {
2782 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2784 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2789 TCGv t0
= tcg_temp_local_new();
2790 TCGv t1
= tcg_temp_new();
2791 TCGv t2
= tcg_temp_new();
2792 TCGLabel
*l1
= gen_new_label();
2794 gen_load_gpr(t1
, rs
);
2795 gen_load_gpr(t2
, rt
);
2796 tcg_gen_sub_tl(t0
, t1
, t2
);
2797 tcg_gen_xor_tl(t2
, t1
, t2
);
2798 tcg_gen_xor_tl(t1
, t0
, t1
);
2799 tcg_gen_and_tl(t1
, t1
, t2
);
2801 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2803 /* operands of different sign, first operand and result different sign */
2804 generate_exception(ctx
, EXCP_OVERFLOW
);
2806 gen_store_gpr(t0
, rd
);
2811 if (rs
!= 0 && rt
!= 0) {
2812 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2813 } else if (rs
== 0 && rt
!= 0) {
2814 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2815 } else if (rs
!= 0 && rt
== 0) {
2816 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2818 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2823 if (likely(rs
!= 0 && rt
!= 0)) {
2824 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2825 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2827 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2833 /* Conditional move */
2834 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2835 int rd
, int rs
, int rt
)
2840 /* If no destination, treat it as a NOP. */
2844 t0
= tcg_temp_new();
2845 gen_load_gpr(t0
, rt
);
2846 t1
= tcg_const_tl(0);
2847 t2
= tcg_temp_new();
2848 gen_load_gpr(t2
, rs
);
2851 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2854 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2857 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2860 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2869 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2870 int rd
, int rs
, int rt
)
2873 /* If no destination, treat it as a NOP. */
2879 if (likely(rs
!= 0 && rt
!= 0)) {
2880 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2882 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2886 if (rs
!= 0 && rt
!= 0) {
2887 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2888 } else if (rs
== 0 && rt
!= 0) {
2889 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2890 } else if (rs
!= 0 && rt
== 0) {
2891 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2893 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2897 if (likely(rs
!= 0 && rt
!= 0)) {
2898 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2899 } else if (rs
== 0 && rt
!= 0) {
2900 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2901 } else if (rs
!= 0 && rt
== 0) {
2902 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2904 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2908 if (likely(rs
!= 0 && rt
!= 0)) {
2909 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2910 } else if (rs
== 0 && rt
!= 0) {
2911 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2912 } else if (rs
!= 0 && rt
== 0) {
2913 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2915 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2921 /* Set on lower than */
2922 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2923 int rd
, int rs
, int rt
)
2928 /* If no destination, treat it as a NOP. */
2932 t0
= tcg_temp_new();
2933 t1
= tcg_temp_new();
2934 gen_load_gpr(t0
, rs
);
2935 gen_load_gpr(t1
, rt
);
2938 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2941 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2949 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2950 int rd
, int rs
, int rt
)
2955 /* If no destination, treat it as a NOP.
2956 For add & sub, we must generate the overflow exception when needed. */
2960 t0
= tcg_temp_new();
2961 t1
= tcg_temp_new();
2962 gen_load_gpr(t0
, rs
);
2963 gen_load_gpr(t1
, rt
);
2966 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2967 tcg_gen_shl_tl(t0
, t1
, t0
);
2968 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2971 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2972 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2975 tcg_gen_ext32u_tl(t1
, t1
);
2976 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2977 tcg_gen_shr_tl(t0
, t1
, t0
);
2978 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2982 TCGv_i32 t2
= tcg_temp_new_i32();
2983 TCGv_i32 t3
= tcg_temp_new_i32();
2985 tcg_gen_trunc_tl_i32(t2
, t0
);
2986 tcg_gen_trunc_tl_i32(t3
, t1
);
2987 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2988 tcg_gen_rotr_i32(t2
, t3
, t2
);
2989 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2990 tcg_temp_free_i32(t2
);
2991 tcg_temp_free_i32(t3
);
2994 #if defined(TARGET_MIPS64)
2996 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2997 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3000 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3001 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3004 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3005 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3008 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3009 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3017 /* Arithmetic on HI/LO registers */
3018 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3020 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3031 #if defined(TARGET_MIPS64)
3033 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3037 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3041 #if defined(TARGET_MIPS64)
3043 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3047 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3052 #if defined(TARGET_MIPS64)
3054 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3058 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3061 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3066 #if defined(TARGET_MIPS64)
3068 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3072 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3075 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3081 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3084 TCGv t0
= tcg_const_tl(addr
);
3085 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3086 gen_store_gpr(t0
, reg
);
3090 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3096 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3099 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3100 addr
= addr_add(ctx
, pc
, offset
);
3101 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3105 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3106 addr
= addr_add(ctx
, pc
, offset
);
3107 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3109 #if defined(TARGET_MIPS64)
3112 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3113 addr
= addr_add(ctx
, pc
, offset
);
3114 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3118 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3121 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3122 addr
= addr_add(ctx
, pc
, offset
);
3123 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3128 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3129 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3130 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3133 #if defined(TARGET_MIPS64)
3134 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3135 case R6_OPC_LDPC
+ (1 << 16):
3136 case R6_OPC_LDPC
+ (2 << 16):
3137 case R6_OPC_LDPC
+ (3 << 16):
3139 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3140 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3141 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3145 MIPS_INVAL("OPC_PCREL");
3146 generate_exception_end(ctx
, EXCP_RI
);
3153 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3162 t0
= tcg_temp_new();
3163 t1
= tcg_temp_new();
3165 gen_load_gpr(t0
, rs
);
3166 gen_load_gpr(t1
, rt
);
3171 TCGv t2
= tcg_temp_new();
3172 TCGv t3
= tcg_temp_new();
3173 tcg_gen_ext32s_tl(t0
, t0
);
3174 tcg_gen_ext32s_tl(t1
, t1
);
3175 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3176 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3177 tcg_gen_and_tl(t2
, t2
, t3
);
3178 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3179 tcg_gen_or_tl(t2
, t2
, t3
);
3180 tcg_gen_movi_tl(t3
, 0);
3181 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3182 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3183 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3190 TCGv t2
= tcg_temp_new();
3191 TCGv t3
= tcg_temp_new();
3192 tcg_gen_ext32s_tl(t0
, t0
);
3193 tcg_gen_ext32s_tl(t1
, t1
);
3194 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3195 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3196 tcg_gen_and_tl(t2
, t2
, t3
);
3197 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3198 tcg_gen_or_tl(t2
, t2
, t3
);
3199 tcg_gen_movi_tl(t3
, 0);
3200 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3201 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3202 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3209 TCGv t2
= tcg_const_tl(0);
3210 TCGv t3
= tcg_const_tl(1);
3211 tcg_gen_ext32u_tl(t0
, t0
);
3212 tcg_gen_ext32u_tl(t1
, t1
);
3213 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3214 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3215 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3222 TCGv t2
= tcg_const_tl(0);
3223 TCGv t3
= tcg_const_tl(1);
3224 tcg_gen_ext32u_tl(t0
, t0
);
3225 tcg_gen_ext32u_tl(t1
, t1
);
3226 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3227 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3228 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3235 TCGv_i32 t2
= tcg_temp_new_i32();
3236 TCGv_i32 t3
= tcg_temp_new_i32();
3237 tcg_gen_trunc_tl_i32(t2
, t0
);
3238 tcg_gen_trunc_tl_i32(t3
, t1
);
3239 tcg_gen_mul_i32(t2
, t2
, t3
);
3240 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3241 tcg_temp_free_i32(t2
);
3242 tcg_temp_free_i32(t3
);
3247 TCGv_i32 t2
= tcg_temp_new_i32();
3248 TCGv_i32 t3
= tcg_temp_new_i32();
3249 tcg_gen_trunc_tl_i32(t2
, t0
);
3250 tcg_gen_trunc_tl_i32(t3
, t1
);
3251 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3252 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3253 tcg_temp_free_i32(t2
);
3254 tcg_temp_free_i32(t3
);
3259 TCGv_i32 t2
= tcg_temp_new_i32();
3260 TCGv_i32 t3
= tcg_temp_new_i32();
3261 tcg_gen_trunc_tl_i32(t2
, t0
);
3262 tcg_gen_trunc_tl_i32(t3
, t1
);
3263 tcg_gen_mul_i32(t2
, t2
, t3
);
3264 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3265 tcg_temp_free_i32(t2
);
3266 tcg_temp_free_i32(t3
);
3271 TCGv_i32 t2
= tcg_temp_new_i32();
3272 TCGv_i32 t3
= tcg_temp_new_i32();
3273 tcg_gen_trunc_tl_i32(t2
, t0
);
3274 tcg_gen_trunc_tl_i32(t3
, t1
);
3275 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3276 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3277 tcg_temp_free_i32(t2
);
3278 tcg_temp_free_i32(t3
);
3281 #if defined(TARGET_MIPS64)
3284 TCGv t2
= tcg_temp_new();
3285 TCGv t3
= tcg_temp_new();
3286 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3287 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3288 tcg_gen_and_tl(t2
, t2
, t3
);
3289 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3290 tcg_gen_or_tl(t2
, t2
, t3
);
3291 tcg_gen_movi_tl(t3
, 0);
3292 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3293 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3300 TCGv t2
= tcg_temp_new();
3301 TCGv t3
= tcg_temp_new();
3302 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3303 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3304 tcg_gen_and_tl(t2
, t2
, t3
);
3305 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3306 tcg_gen_or_tl(t2
, t2
, t3
);
3307 tcg_gen_movi_tl(t3
, 0);
3308 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3309 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3316 TCGv t2
= tcg_const_tl(0);
3317 TCGv t3
= tcg_const_tl(1);
3318 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3319 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3326 TCGv t2
= tcg_const_tl(0);
3327 TCGv t3
= tcg_const_tl(1);
3328 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3329 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3335 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3339 TCGv t2
= tcg_temp_new();
3340 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3345 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3349 TCGv t2
= tcg_temp_new();
3350 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3356 MIPS_INVAL("r6 mul/div");
3357 generate_exception_end(ctx
, EXCP_RI
);
3365 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3366 int acc
, int rs
, int rt
)
3370 t0
= tcg_temp_new();
3371 t1
= tcg_temp_new();
3373 gen_load_gpr(t0
, rs
);
3374 gen_load_gpr(t1
, rt
);
3383 TCGv t2
= tcg_temp_new();
3384 TCGv t3
= tcg_temp_new();
3385 tcg_gen_ext32s_tl(t0
, t0
);
3386 tcg_gen_ext32s_tl(t1
, t1
);
3387 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3388 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3389 tcg_gen_and_tl(t2
, t2
, t3
);
3390 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3391 tcg_gen_or_tl(t2
, t2
, t3
);
3392 tcg_gen_movi_tl(t3
, 0);
3393 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3394 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3395 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3396 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3397 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3404 TCGv t2
= tcg_const_tl(0);
3405 TCGv t3
= tcg_const_tl(1);
3406 tcg_gen_ext32u_tl(t0
, t0
);
3407 tcg_gen_ext32u_tl(t1
, t1
);
3408 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3409 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3410 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3411 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3412 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3419 TCGv_i32 t2
= tcg_temp_new_i32();
3420 TCGv_i32 t3
= tcg_temp_new_i32();
3421 tcg_gen_trunc_tl_i32(t2
, t0
);
3422 tcg_gen_trunc_tl_i32(t3
, t1
);
3423 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3424 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3425 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3426 tcg_temp_free_i32(t2
);
3427 tcg_temp_free_i32(t3
);
3432 TCGv_i32 t2
= tcg_temp_new_i32();
3433 TCGv_i32 t3
= tcg_temp_new_i32();
3434 tcg_gen_trunc_tl_i32(t2
, t0
);
3435 tcg_gen_trunc_tl_i32(t3
, t1
);
3436 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3437 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3438 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3439 tcg_temp_free_i32(t2
);
3440 tcg_temp_free_i32(t3
);
3443 #if defined(TARGET_MIPS64)
3446 TCGv t2
= tcg_temp_new();
3447 TCGv t3
= tcg_temp_new();
3448 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3449 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3450 tcg_gen_and_tl(t2
, t2
, t3
);
3451 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3452 tcg_gen_or_tl(t2
, t2
, t3
);
3453 tcg_gen_movi_tl(t3
, 0);
3454 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3455 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3456 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3463 TCGv t2
= tcg_const_tl(0);
3464 TCGv t3
= tcg_const_tl(1);
3465 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3466 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3467 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3473 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3476 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3481 TCGv_i64 t2
= tcg_temp_new_i64();
3482 TCGv_i64 t3
= tcg_temp_new_i64();
3484 tcg_gen_ext_tl_i64(t2
, t0
);
3485 tcg_gen_ext_tl_i64(t3
, t1
);
3486 tcg_gen_mul_i64(t2
, t2
, t3
);
3487 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3488 tcg_gen_add_i64(t2
, t2
, t3
);
3489 tcg_temp_free_i64(t3
);
3490 gen_move_low32(cpu_LO
[acc
], t2
);
3491 gen_move_high32(cpu_HI
[acc
], t2
);
3492 tcg_temp_free_i64(t2
);
3497 TCGv_i64 t2
= tcg_temp_new_i64();
3498 TCGv_i64 t3
= tcg_temp_new_i64();
3500 tcg_gen_ext32u_tl(t0
, t0
);
3501 tcg_gen_ext32u_tl(t1
, t1
);
3502 tcg_gen_extu_tl_i64(t2
, t0
);
3503 tcg_gen_extu_tl_i64(t3
, t1
);
3504 tcg_gen_mul_i64(t2
, t2
, t3
);
3505 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3506 tcg_gen_add_i64(t2
, t2
, t3
);
3507 tcg_temp_free_i64(t3
);
3508 gen_move_low32(cpu_LO
[acc
], t2
);
3509 gen_move_high32(cpu_HI
[acc
], t2
);
3510 tcg_temp_free_i64(t2
);
3515 TCGv_i64 t2
= tcg_temp_new_i64();
3516 TCGv_i64 t3
= tcg_temp_new_i64();
3518 tcg_gen_ext_tl_i64(t2
, t0
);
3519 tcg_gen_ext_tl_i64(t3
, t1
);
3520 tcg_gen_mul_i64(t2
, t2
, t3
);
3521 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3522 tcg_gen_sub_i64(t2
, t3
, t2
);
3523 tcg_temp_free_i64(t3
);
3524 gen_move_low32(cpu_LO
[acc
], t2
);
3525 gen_move_high32(cpu_HI
[acc
], t2
);
3526 tcg_temp_free_i64(t2
);
3531 TCGv_i64 t2
= tcg_temp_new_i64();
3532 TCGv_i64 t3
= tcg_temp_new_i64();
3534 tcg_gen_ext32u_tl(t0
, t0
);
3535 tcg_gen_ext32u_tl(t1
, t1
);
3536 tcg_gen_extu_tl_i64(t2
, t0
);
3537 tcg_gen_extu_tl_i64(t3
, t1
);
3538 tcg_gen_mul_i64(t2
, t2
, t3
);
3539 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3540 tcg_gen_sub_i64(t2
, t3
, t2
);
3541 tcg_temp_free_i64(t3
);
3542 gen_move_low32(cpu_LO
[acc
], t2
);
3543 gen_move_high32(cpu_HI
[acc
], t2
);
3544 tcg_temp_free_i64(t2
);
3548 MIPS_INVAL("mul/div");
3549 generate_exception_end(ctx
, EXCP_RI
);
3557 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3558 int rd
, int rs
, int rt
)
3560 TCGv t0
= tcg_temp_new();
3561 TCGv t1
= tcg_temp_new();
3563 gen_load_gpr(t0
, rs
);
3564 gen_load_gpr(t1
, rt
);
3567 case OPC_VR54XX_MULS
:
3568 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3570 case OPC_VR54XX_MULSU
:
3571 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3573 case OPC_VR54XX_MACC
:
3574 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3576 case OPC_VR54XX_MACCU
:
3577 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3579 case OPC_VR54XX_MSAC
:
3580 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3582 case OPC_VR54XX_MSACU
:
3583 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3585 case OPC_VR54XX_MULHI
:
3586 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3588 case OPC_VR54XX_MULHIU
:
3589 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3591 case OPC_VR54XX_MULSHI
:
3592 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3594 case OPC_VR54XX_MULSHIU
:
3595 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3597 case OPC_VR54XX_MACCHI
:
3598 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3600 case OPC_VR54XX_MACCHIU
:
3601 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3603 case OPC_VR54XX_MSACHI
:
3604 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3606 case OPC_VR54XX_MSACHIU
:
3607 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3610 MIPS_INVAL("mul vr54xx");
3611 generate_exception_end(ctx
, EXCP_RI
);
3614 gen_store_gpr(t0
, rd
);
3621 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3631 gen_load_gpr(t0
, rs
);
3636 #if defined(TARGET_MIPS64)
3640 tcg_gen_not_tl(t0
, t0
);
3649 tcg_gen_ext32u_tl(t0
, t0
);
3650 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3651 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3653 #if defined(TARGET_MIPS64)
3658 tcg_gen_clzi_i64(t0
, t0
, 64);
3664 /* Godson integer instructions */
3665 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3666 int rd
, int rs
, int rt
)
3678 case OPC_MULTU_G_2E
:
3679 case OPC_MULTU_G_2F
:
3680 #if defined(TARGET_MIPS64)
3681 case OPC_DMULT_G_2E
:
3682 case OPC_DMULT_G_2F
:
3683 case OPC_DMULTU_G_2E
:
3684 case OPC_DMULTU_G_2F
:
3686 t0
= tcg_temp_new();
3687 t1
= tcg_temp_new();
3690 t0
= tcg_temp_local_new();
3691 t1
= tcg_temp_local_new();
3695 gen_load_gpr(t0
, rs
);
3696 gen_load_gpr(t1
, rt
);
3701 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3702 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3704 case OPC_MULTU_G_2E
:
3705 case OPC_MULTU_G_2F
:
3706 tcg_gen_ext32u_tl(t0
, t0
);
3707 tcg_gen_ext32u_tl(t1
, t1
);
3708 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3709 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3714 TCGLabel
*l1
= gen_new_label();
3715 TCGLabel
*l2
= gen_new_label();
3716 TCGLabel
*l3
= gen_new_label();
3717 tcg_gen_ext32s_tl(t0
, t0
);
3718 tcg_gen_ext32s_tl(t1
, t1
);
3719 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3720 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3723 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3724 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3725 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3728 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3729 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3736 TCGLabel
*l1
= gen_new_label();
3737 TCGLabel
*l2
= gen_new_label();
3738 tcg_gen_ext32u_tl(t0
, t0
);
3739 tcg_gen_ext32u_tl(t1
, t1
);
3740 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3741 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3744 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3745 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3752 TCGLabel
*l1
= gen_new_label();
3753 TCGLabel
*l2
= gen_new_label();
3754 TCGLabel
*l3
= gen_new_label();
3755 tcg_gen_ext32u_tl(t0
, t0
);
3756 tcg_gen_ext32u_tl(t1
, t1
);
3757 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3758 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3759 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3761 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3764 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3765 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3772 TCGLabel
*l1
= gen_new_label();
3773 TCGLabel
*l2
= gen_new_label();
3774 tcg_gen_ext32u_tl(t0
, t0
);
3775 tcg_gen_ext32u_tl(t1
, t1
);
3776 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3777 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3780 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3781 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3785 #if defined(TARGET_MIPS64)
3786 case OPC_DMULT_G_2E
:
3787 case OPC_DMULT_G_2F
:
3788 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3790 case OPC_DMULTU_G_2E
:
3791 case OPC_DMULTU_G_2F
:
3792 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3797 TCGLabel
*l1
= gen_new_label();
3798 TCGLabel
*l2
= gen_new_label();
3799 TCGLabel
*l3
= gen_new_label();
3800 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3801 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3804 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3805 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3806 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3809 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3813 case OPC_DDIVU_G_2E
:
3814 case OPC_DDIVU_G_2F
:
3816 TCGLabel
*l1
= gen_new_label();
3817 TCGLabel
*l2
= gen_new_label();
3818 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3819 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3822 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3829 TCGLabel
*l1
= gen_new_label();
3830 TCGLabel
*l2
= gen_new_label();
3831 TCGLabel
*l3
= gen_new_label();
3832 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3833 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3834 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3836 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3839 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3843 case OPC_DMODU_G_2E
:
3844 case OPC_DMODU_G_2F
:
3846 TCGLabel
*l1
= gen_new_label();
3847 TCGLabel
*l2
= gen_new_label();
3848 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3849 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3852 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3863 /* Loongson multimedia instructions */
3864 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3866 uint32_t opc
, shift_max
;
3869 opc
= MASK_LMI(ctx
->opcode
);
3875 t0
= tcg_temp_local_new_i64();
3876 t1
= tcg_temp_local_new_i64();
3879 t0
= tcg_temp_new_i64();
3880 t1
= tcg_temp_new_i64();
3884 check_cp1_enabled(ctx
);
3885 gen_load_fpr64(ctx
, t0
, rs
);
3886 gen_load_fpr64(ctx
, t1
, rt
);
3888 #define LMI_HELPER(UP, LO) \
3889 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3890 #define LMI_HELPER_1(UP, LO) \
3891 case OPC_##UP: gen_helper_##LO(t0, t0); break
3892 #define LMI_DIRECT(UP, LO, OP) \
3893 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3896 LMI_HELPER(PADDSH
, paddsh
);
3897 LMI_HELPER(PADDUSH
, paddush
);
3898 LMI_HELPER(PADDH
, paddh
);
3899 LMI_HELPER(PADDW
, paddw
);
3900 LMI_HELPER(PADDSB
, paddsb
);
3901 LMI_HELPER(PADDUSB
, paddusb
);
3902 LMI_HELPER(PADDB
, paddb
);
3904 LMI_HELPER(PSUBSH
, psubsh
);
3905 LMI_HELPER(PSUBUSH
, psubush
);
3906 LMI_HELPER(PSUBH
, psubh
);
3907 LMI_HELPER(PSUBW
, psubw
);
3908 LMI_HELPER(PSUBSB
, psubsb
);
3909 LMI_HELPER(PSUBUSB
, psubusb
);
3910 LMI_HELPER(PSUBB
, psubb
);
3912 LMI_HELPER(PSHUFH
, pshufh
);
3913 LMI_HELPER(PACKSSWH
, packsswh
);
3914 LMI_HELPER(PACKSSHB
, packsshb
);
3915 LMI_HELPER(PACKUSHB
, packushb
);
3917 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3918 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3919 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3920 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3921 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3922 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3924 LMI_HELPER(PAVGH
, pavgh
);
3925 LMI_HELPER(PAVGB
, pavgb
);
3926 LMI_HELPER(PMAXSH
, pmaxsh
);
3927 LMI_HELPER(PMINSH
, pminsh
);
3928 LMI_HELPER(PMAXUB
, pmaxub
);
3929 LMI_HELPER(PMINUB
, pminub
);
3931 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3932 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3933 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3934 LMI_HELPER(PCMPGTH
, pcmpgth
);
3935 LMI_HELPER(PCMPEQB
, pcmpeqb
);
3936 LMI_HELPER(PCMPGTB
, pcmpgtb
);
3938 LMI_HELPER(PSLLW
, psllw
);
3939 LMI_HELPER(PSLLH
, psllh
);
3940 LMI_HELPER(PSRLW
, psrlw
);
3941 LMI_HELPER(PSRLH
, psrlh
);
3942 LMI_HELPER(PSRAW
, psraw
);
3943 LMI_HELPER(PSRAH
, psrah
);
3945 LMI_HELPER(PMULLH
, pmullh
);
3946 LMI_HELPER(PMULHH
, pmulhh
);
3947 LMI_HELPER(PMULHUH
, pmulhuh
);
3948 LMI_HELPER(PMADDHW
, pmaddhw
);
3950 LMI_HELPER(PASUBUB
, pasubub
);
3951 LMI_HELPER_1(BIADD
, biadd
);
3952 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
3954 LMI_DIRECT(PADDD
, paddd
, add
);
3955 LMI_DIRECT(PSUBD
, psubd
, sub
);
3956 LMI_DIRECT(XOR_CP2
, xor, xor);
3957 LMI_DIRECT(NOR_CP2
, nor
, nor
);
3958 LMI_DIRECT(AND_CP2
, and, and);
3959 LMI_DIRECT(OR_CP2
, or, or);
3962 tcg_gen_andc_i64(t0
, t1
, t0
);
3966 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
3969 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
3972 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
3975 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
3979 tcg_gen_andi_i64(t1
, t1
, 3);
3980 tcg_gen_shli_i64(t1
, t1
, 4);
3981 tcg_gen_shr_i64(t0
, t0
, t1
);
3982 tcg_gen_ext16u_i64(t0
, t0
);
3986 tcg_gen_add_i64(t0
, t0
, t1
);
3987 tcg_gen_ext32s_i64(t0
, t0
);
3990 tcg_gen_sub_i64(t0
, t0
, t1
);
3991 tcg_gen_ext32s_i64(t0
, t0
);
4013 /* Make sure shift count isn't TCG undefined behaviour. */
4014 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4019 tcg_gen_shl_i64(t0
, t0
, t1
);
4023 /* Since SRA is UndefinedResult without sign-extended inputs,
4024 we can treat SRA and DSRA the same. */
4025 tcg_gen_sar_i64(t0
, t0
, t1
);
4028 /* We want to shift in zeros for SRL; zero-extend first. */
4029 tcg_gen_ext32u_i64(t0
, t0
);
4032 tcg_gen_shr_i64(t0
, t0
, t1
);
4036 if (shift_max
== 32) {
4037 tcg_gen_ext32s_i64(t0
, t0
);
4040 /* Shifts larger than MAX produce zero. */
4041 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4042 tcg_gen_neg_i64(t1
, t1
);
4043 tcg_gen_and_i64(t0
, t0
, t1
);
4049 TCGv_i64 t2
= tcg_temp_new_i64();
4050 TCGLabel
*lab
= gen_new_label();
4052 tcg_gen_mov_i64(t2
, t0
);
4053 tcg_gen_add_i64(t0
, t1
, t2
);
4054 if (opc
== OPC_ADD_CP2
) {
4055 tcg_gen_ext32s_i64(t0
, t0
);
4057 tcg_gen_xor_i64(t1
, t1
, t2
);
4058 tcg_gen_xor_i64(t2
, t2
, t0
);
4059 tcg_gen_andc_i64(t1
, t2
, t1
);
4060 tcg_temp_free_i64(t2
);
4061 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4062 generate_exception(ctx
, EXCP_OVERFLOW
);
4070 TCGv_i64 t2
= tcg_temp_new_i64();
4071 TCGLabel
*lab
= gen_new_label();
4073 tcg_gen_mov_i64(t2
, t0
);
4074 tcg_gen_sub_i64(t0
, t1
, t2
);
4075 if (opc
== OPC_SUB_CP2
) {
4076 tcg_gen_ext32s_i64(t0
, t0
);
4078 tcg_gen_xor_i64(t1
, t1
, t2
);
4079 tcg_gen_xor_i64(t2
, t2
, t0
);
4080 tcg_gen_and_i64(t1
, t1
, t2
);
4081 tcg_temp_free_i64(t2
);
4082 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4083 generate_exception(ctx
, EXCP_OVERFLOW
);
4089 tcg_gen_ext32u_i64(t0
, t0
);
4090 tcg_gen_ext32u_i64(t1
, t1
);
4091 tcg_gen_mul_i64(t0
, t0
, t1
);
4100 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4101 FD field is the CC field? */
4103 MIPS_INVAL("loongson_cp2");
4104 generate_exception_end(ctx
, EXCP_RI
);
4111 gen_store_fpr64(ctx
, t0
, rd
);
4113 tcg_temp_free_i64(t0
);
4114 tcg_temp_free_i64(t1
);
4118 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4119 int rs
, int rt
, int16_t imm
)
4122 TCGv t0
= tcg_temp_new();
4123 TCGv t1
= tcg_temp_new();
4126 /* Load needed operands */
4134 /* Compare two registers */
4136 gen_load_gpr(t0
, rs
);
4137 gen_load_gpr(t1
, rt
);
4147 /* Compare register to immediate */
4148 if (rs
!= 0 || imm
!= 0) {
4149 gen_load_gpr(t0
, rs
);
4150 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4157 case OPC_TEQ
: /* rs == rs */
4158 case OPC_TEQI
: /* r0 == 0 */
4159 case OPC_TGE
: /* rs >= rs */
4160 case OPC_TGEI
: /* r0 >= 0 */
4161 case OPC_TGEU
: /* rs >= rs unsigned */
4162 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4164 generate_exception_end(ctx
, EXCP_TRAP
);
4166 case OPC_TLT
: /* rs < rs */
4167 case OPC_TLTI
: /* r0 < 0 */
4168 case OPC_TLTU
: /* rs < rs unsigned */
4169 case OPC_TLTIU
: /* r0 < 0 unsigned */
4170 case OPC_TNE
: /* rs != rs */
4171 case OPC_TNEI
: /* r0 != 0 */
4172 /* Never trap: treat as NOP. */
4176 TCGLabel
*l1
= gen_new_label();
4181 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4185 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4189 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4193 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4197 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4201 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4204 generate_exception(ctx
, EXCP_TRAP
);
4211 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
4213 if (unlikely(ctx
->singlestep_enabled
)) {
4217 #ifndef CONFIG_USER_ONLY
4218 return (ctx
->tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
4224 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4226 if (use_goto_tb(ctx
, dest
)) {
4229 tcg_gen_exit_tb((uintptr_t)ctx
->tb
+ n
);
4232 if (ctx
->singlestep_enabled
) {
4233 save_cpu_state(ctx
, 0);
4234 gen_helper_raise_exception_debug(cpu_env
);
4236 tcg_gen_lookup_and_goto_ptr(cpu_PC
);
4240 /* Branches (before delay slot) */
4241 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4243 int rs
, int rt
, int32_t offset
,
4246 target_ulong btgt
= -1;
4248 int bcond_compute
= 0;
4249 TCGv t0
= tcg_temp_new();
4250 TCGv t1
= tcg_temp_new();
4252 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4253 #ifdef MIPS_DEBUG_DISAS
4254 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4255 TARGET_FMT_lx
"\n", ctx
->pc
);
4257 generate_exception_end(ctx
, EXCP_RI
);
4261 /* Load needed operands */
4267 /* Compare two registers */
4269 gen_load_gpr(t0
, rs
);
4270 gen_load_gpr(t1
, rt
);
4273 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4287 /* Compare to zero */
4289 gen_load_gpr(t0
, rs
);
4292 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4295 #if defined(TARGET_MIPS64)
4297 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4299 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4302 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4307 /* Jump to immediate */
4308 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4312 /* Jump to register */
4313 if (offset
!= 0 && offset
!= 16) {
4314 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4315 others are reserved. */
4316 MIPS_INVAL("jump hint");
4317 generate_exception_end(ctx
, EXCP_RI
);
4320 gen_load_gpr(btarget
, rs
);
4323 MIPS_INVAL("branch/jump");
4324 generate_exception_end(ctx
, EXCP_RI
);
4327 if (bcond_compute
== 0) {
4328 /* No condition to be computed */
4330 case OPC_BEQ
: /* rx == rx */
4331 case OPC_BEQL
: /* rx == rx likely */
4332 case OPC_BGEZ
: /* 0 >= 0 */
4333 case OPC_BGEZL
: /* 0 >= 0 likely */
4334 case OPC_BLEZ
: /* 0 <= 0 */
4335 case OPC_BLEZL
: /* 0 <= 0 likely */
4337 ctx
->hflags
|= MIPS_HFLAG_B
;
4339 case OPC_BGEZAL
: /* 0 >= 0 */
4340 case OPC_BGEZALL
: /* 0 >= 0 likely */
4341 /* Always take and link */
4343 ctx
->hflags
|= MIPS_HFLAG_B
;
4345 case OPC_BNE
: /* rx != rx */
4346 case OPC_BGTZ
: /* 0 > 0 */
4347 case OPC_BLTZ
: /* 0 < 0 */
4350 case OPC_BLTZAL
: /* 0 < 0 */
4351 /* Handle as an unconditional branch to get correct delay
4354 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4355 ctx
->hflags
|= MIPS_HFLAG_B
;
4357 case OPC_BLTZALL
: /* 0 < 0 likely */
4358 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4359 /* Skip the instruction in the delay slot */
4362 case OPC_BNEL
: /* rx != rx likely */
4363 case OPC_BGTZL
: /* 0 > 0 likely */
4364 case OPC_BLTZL
: /* 0 < 0 likely */
4365 /* Skip the instruction in the delay slot */
4369 ctx
->hflags
|= MIPS_HFLAG_B
;
4372 ctx
->hflags
|= MIPS_HFLAG_BX
;
4376 ctx
->hflags
|= MIPS_HFLAG_B
;
4379 ctx
->hflags
|= MIPS_HFLAG_BR
;
4383 ctx
->hflags
|= MIPS_HFLAG_BR
;
4386 MIPS_INVAL("branch/jump");
4387 generate_exception_end(ctx
, EXCP_RI
);
4393 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4396 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4399 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4402 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4405 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4408 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4411 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4415 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4419 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4422 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4425 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4428 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4431 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4434 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4437 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4439 #if defined(TARGET_MIPS64)
4441 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4445 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4448 ctx
->hflags
|= MIPS_HFLAG_BC
;
4451 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4454 ctx
->hflags
|= MIPS_HFLAG_BL
;
4457 MIPS_INVAL("conditional branch/jump");
4458 generate_exception_end(ctx
, EXCP_RI
);
4463 ctx
->btarget
= btgt
;
4465 switch (delayslot_size
) {
4467 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4470 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4475 int post_delay
= insn_bytes
+ delayslot_size
;
4476 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4478 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4482 if (insn_bytes
== 2)
4483 ctx
->hflags
|= MIPS_HFLAG_B16
;
4488 /* special3 bitfield operations */
4489 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4490 int rs
, int lsb
, int msb
)
4492 TCGv t0
= tcg_temp_new();
4493 TCGv t1
= tcg_temp_new();
4495 gen_load_gpr(t1
, rs
);
4498 if (lsb
+ msb
> 31) {
4502 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4504 /* The two checks together imply that lsb == 0,
4505 so this is a simple sign-extension. */
4506 tcg_gen_ext32s_tl(t0
, t1
);
4509 #if defined(TARGET_MIPS64)
4518 if (lsb
+ msb
> 63) {
4521 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
4528 gen_load_gpr(t0
, rt
);
4529 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4530 tcg_gen_ext32s_tl(t0
, t0
);
4532 #if defined(TARGET_MIPS64)
4543 gen_load_gpr(t0
, rt
);
4544 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4549 MIPS_INVAL("bitops");
4550 generate_exception_end(ctx
, EXCP_RI
);
4555 gen_store_gpr(t0
, rt
);
4560 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4565 /* If no destination, treat it as a NOP. */
4569 t0
= tcg_temp_new();
4570 gen_load_gpr(t0
, rt
);
4574 TCGv t1
= tcg_temp_new();
4575 TCGv t2
= tcg_const_tl(0x00FF00FF);
4577 tcg_gen_shri_tl(t1
, t0
, 8);
4578 tcg_gen_and_tl(t1
, t1
, t2
);
4579 tcg_gen_and_tl(t0
, t0
, t2
);
4580 tcg_gen_shli_tl(t0
, t0
, 8);
4581 tcg_gen_or_tl(t0
, t0
, t1
);
4584 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4588 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4591 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4593 #if defined(TARGET_MIPS64)
4596 TCGv t1
= tcg_temp_new();
4597 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
4599 tcg_gen_shri_tl(t1
, t0
, 8);
4600 tcg_gen_and_tl(t1
, t1
, t2
);
4601 tcg_gen_and_tl(t0
, t0
, t2
);
4602 tcg_gen_shli_tl(t0
, t0
, 8);
4603 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4610 TCGv t1
= tcg_temp_new();
4611 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
4613 tcg_gen_shri_tl(t1
, t0
, 16);
4614 tcg_gen_and_tl(t1
, t1
, t2
);
4615 tcg_gen_and_tl(t0
, t0
, t2
);
4616 tcg_gen_shli_tl(t0
, t0
, 16);
4617 tcg_gen_or_tl(t0
, t0
, t1
);
4618 tcg_gen_shri_tl(t1
, t0
, 32);
4619 tcg_gen_shli_tl(t0
, t0
, 32);
4620 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4627 MIPS_INVAL("bsfhl");
4628 generate_exception_end(ctx
, EXCP_RI
);
4635 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4644 t0
= tcg_temp_new();
4645 t1
= tcg_temp_new();
4646 gen_load_gpr(t0
, rs
);
4647 gen_load_gpr(t1
, rt
);
4648 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4649 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4650 if (opc
== OPC_LSA
) {
4651 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4660 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4668 t0
= tcg_temp_new();
4669 gen_load_gpr(t0
, rt
);
4673 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4675 #if defined(TARGET_MIPS64)
4677 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4682 TCGv t1
= tcg_temp_new();
4683 gen_load_gpr(t1
, rs
);
4687 TCGv_i64 t2
= tcg_temp_new_i64();
4688 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4689 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4690 gen_move_low32(cpu_gpr
[rd
], t2
);
4691 tcg_temp_free_i64(t2
);
4694 #if defined(TARGET_MIPS64)
4696 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4697 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4698 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4708 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4715 t0
= tcg_temp_new();
4716 gen_load_gpr(t0
, rt
);
4719 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4721 #if defined(TARGET_MIPS64)
4723 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4730 #ifndef CONFIG_USER_ONLY
4731 /* CP0 (MMU and control) */
4732 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4734 TCGv_i64 t0
= tcg_temp_new_i64();
4735 TCGv_i64 t1
= tcg_temp_new_i64();
4737 tcg_gen_ext_tl_i64(t0
, arg
);
4738 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4739 #if defined(TARGET_MIPS64)
4740 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4742 tcg_gen_concat32_i64(t1
, t1
, t0
);
4744 tcg_gen_st_i64(t1
, cpu_env
, off
);
4745 tcg_temp_free_i64(t1
);
4746 tcg_temp_free_i64(t0
);
4749 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4751 TCGv_i64 t0
= tcg_temp_new_i64();
4752 TCGv_i64 t1
= tcg_temp_new_i64();
4754 tcg_gen_ext_tl_i64(t0
, arg
);
4755 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4756 tcg_gen_concat32_i64(t1
, t1
, t0
);
4757 tcg_gen_st_i64(t1
, cpu_env
, off
);
4758 tcg_temp_free_i64(t1
);
4759 tcg_temp_free_i64(t0
);
4762 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4764 TCGv_i64 t0
= tcg_temp_new_i64();
4766 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4767 #if defined(TARGET_MIPS64)
4768 tcg_gen_shri_i64(t0
, t0
, 30);
4770 tcg_gen_shri_i64(t0
, t0
, 32);
4772 gen_move_low32(arg
, t0
);
4773 tcg_temp_free_i64(t0
);
4776 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4778 TCGv_i64 t0
= tcg_temp_new_i64();
4780 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4781 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4782 gen_move_low32(arg
, t0
);
4783 tcg_temp_free_i64(t0
);
4786 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4788 TCGv_i32 t0
= tcg_temp_new_i32();
4790 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4791 tcg_gen_ext_i32_tl(arg
, t0
);
4792 tcg_temp_free_i32(t0
);
4795 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4797 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4798 tcg_gen_ext32s_tl(arg
, arg
);
4801 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4803 TCGv_i32 t0
= tcg_temp_new_i32();
4805 tcg_gen_trunc_tl_i32(t0
, arg
);
4806 tcg_gen_st_i32(t0
, cpu_env
, off
);
4807 tcg_temp_free_i32(t0
);
/* Guard used inside the CP0 move generators: if the feature predicate
 * @c is false, bail out to the function-local cp0_unimplemented label
 * (which logs and supplies the architected "unimplemented" behaviour).
 * do/while(0) keeps the macro safe as a single statement.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4817 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4819 const char *rn
= "invalid";
4821 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4827 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4831 goto cp0_unimplemented
;
4837 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4841 goto cp0_unimplemented
;
4847 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4848 ctx
->CP0_LLAddr_shift
);
4852 CP0_CHECK(ctx
->mrp
);
4853 gen_helper_mfhc0_maar(arg
, cpu_env
);
4857 goto cp0_unimplemented
;
4866 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4870 goto cp0_unimplemented
;
4874 goto cp0_unimplemented
;
4876 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
4880 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4881 tcg_gen_movi_tl(arg
, 0);
4884 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4886 const char *rn
= "invalid";
4887 uint64_t mask
= ctx
->PAMask
>> 36;
4889 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
4895 tcg_gen_andi_tl(arg
, arg
, mask
);
4896 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4900 goto cp0_unimplemented
;
4906 tcg_gen_andi_tl(arg
, arg
, mask
);
4907 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4911 goto cp0_unimplemented
;
4917 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4918 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4919 relevant for modern MIPS cores supporting MTHC0, therefore
4920 treating MTHC0 to LLAddr as NOP. */
4924 CP0_CHECK(ctx
->mrp
);
4925 gen_helper_mthc0_maar(cpu_env
, arg
);
4929 goto cp0_unimplemented
;
4938 tcg_gen_andi_tl(arg
, arg
, mask
);
4939 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4943 goto cp0_unimplemented
;
4947 goto cp0_unimplemented
;
4949 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
4952 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4955 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
4957 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
4958 tcg_gen_movi_tl(arg
, 0);
4960 tcg_gen_movi_tl(arg
, ~0);
4964 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4966 const char *rn
= "invalid";
4969 check_insn(ctx
, ISA_MIPS32
);
4975 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4979 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4980 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4984 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4985 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4989 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4990 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4995 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4999 goto cp0_unimplemented
;
5005 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5006 gen_helper_mfc0_random(arg
, cpu_env
);
5010 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5011 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5015 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5016 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5020 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5021 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5025 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5026 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5030 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5031 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5035 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5036 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5037 rn
= "VPEScheFBack";
5040 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5041 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5045 goto cp0_unimplemented
;
5052 TCGv_i64 tmp
= tcg_temp_new_i64();
5053 tcg_gen_ld_i64(tmp
, cpu_env
,
5054 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5055 #if defined(TARGET_MIPS64)
5057 /* Move RI/XI fields to bits 31:30 */
5058 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5059 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5062 gen_move_low32(arg
, tmp
);
5063 tcg_temp_free_i64(tmp
);
5068 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5069 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5073 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5074 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5078 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5079 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5083 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5084 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5088 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5089 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5093 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5094 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5098 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5099 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5103 goto cp0_unimplemented
;
5110 TCGv_i64 tmp
= tcg_temp_new_i64();
5111 tcg_gen_ld_i64(tmp
, cpu_env
,
5112 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5113 #if defined(TARGET_MIPS64)
5115 /* Move RI/XI fields to bits 31:30 */
5116 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5117 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5120 gen_move_low32(arg
, tmp
);
5121 tcg_temp_free_i64(tmp
);
5127 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5128 rn
= "GlobalNumber";
5131 goto cp0_unimplemented
;
5137 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5138 tcg_gen_ext32s_tl(arg
, arg
);
5142 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5143 rn
= "ContextConfig";
5144 goto cp0_unimplemented
;
5146 CP0_CHECK(ctx
->ulri
);
5147 tcg_gen_ld32s_tl(arg
, cpu_env
,
5148 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5152 goto cp0_unimplemented
;
5158 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5162 check_insn(ctx
, ISA_MIPS32R2
);
5163 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5167 goto cp0_unimplemented
;
5173 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5177 check_insn(ctx
, ISA_MIPS32R2
);
5178 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5182 check_insn(ctx
, ISA_MIPS32R2
);
5183 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5187 check_insn(ctx
, ISA_MIPS32R2
);
5188 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5192 check_insn(ctx
, ISA_MIPS32R2
);
5193 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5197 check_insn(ctx
, ISA_MIPS32R2
);
5198 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5202 goto cp0_unimplemented
;
5208 check_insn(ctx
, ISA_MIPS32R2
);
5209 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5213 goto cp0_unimplemented
;
5219 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5220 tcg_gen_ext32s_tl(arg
, arg
);
5225 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5230 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5234 goto cp0_unimplemented
;
5240 /* Mark as an IO operation because we read the time. */
5241 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5244 gen_helper_mfc0_count(arg
, cpu_env
);
5245 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5248 /* Break the TB to be able to take timer interrupts immediately
5249 after reading count. */
5250 ctx
->bstate
= BS_STOP
;
5253 /* 6,7 are implementation dependent */
5255 goto cp0_unimplemented
;
5261 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5262 tcg_gen_ext32s_tl(arg
, arg
);
5266 goto cp0_unimplemented
;
5272 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5275 /* 6,7 are implementation dependent */
5277 goto cp0_unimplemented
;
5283 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5287 check_insn(ctx
, ISA_MIPS32R2
);
5288 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5292 check_insn(ctx
, ISA_MIPS32R2
);
5293 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5297 check_insn(ctx
, ISA_MIPS32R2
);
5298 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5302 goto cp0_unimplemented
;
5308 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5312 goto cp0_unimplemented
;
5318 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5319 tcg_gen_ext32s_tl(arg
, arg
);
5323 goto cp0_unimplemented
;
5329 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5333 check_insn(ctx
, ISA_MIPS32R2
);
5334 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5338 check_insn(ctx
, ISA_MIPS32R2
);
5339 CP0_CHECK(ctx
->cmgcr
);
5340 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5341 tcg_gen_ext32s_tl(arg
, arg
);
5345 goto cp0_unimplemented
;
5351 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5355 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5363 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5367 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5371 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5374 /* 6,7 are implementation dependent */
5376 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5380 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5384 goto cp0_unimplemented
;
5390 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5394 CP0_CHECK(ctx
->mrp
);
5395 gen_helper_mfc0_maar(arg
, cpu_env
);
5399 CP0_CHECK(ctx
->mrp
);
5400 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
5404 goto cp0_unimplemented
;
5410 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5414 goto cp0_unimplemented
;
5420 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5424 goto cp0_unimplemented
;
5430 #if defined(TARGET_MIPS64)
5431 check_insn(ctx
, ISA_MIPS3
);
5432 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5433 tcg_gen_ext32s_tl(arg
, arg
);
5438 goto cp0_unimplemented
;
5442 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5443 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5446 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5450 goto cp0_unimplemented
;
5454 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5455 rn
= "'Diagnostic"; /* implementation dependent */
5460 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5464 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5465 rn
= "TraceControl";
5466 goto cp0_unimplemented
;
5468 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5469 rn
= "TraceControl2";
5470 goto cp0_unimplemented
;
5472 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5473 rn
= "UserTraceData";
5474 goto cp0_unimplemented
;
5476 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5478 goto cp0_unimplemented
;
5480 goto cp0_unimplemented
;
5487 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5488 tcg_gen_ext32s_tl(arg
, arg
);
5492 goto cp0_unimplemented
;
5498 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5499 rn
= "Performance0";
5502 // gen_helper_mfc0_performance1(arg);
5503 rn
= "Performance1";
5504 goto cp0_unimplemented
;
5506 // gen_helper_mfc0_performance2(arg);
5507 rn
= "Performance2";
5508 goto cp0_unimplemented
;
5510 // gen_helper_mfc0_performance3(arg);
5511 rn
= "Performance3";
5512 goto cp0_unimplemented
;
5514 // gen_helper_mfc0_performance4(arg);
5515 rn
= "Performance4";
5516 goto cp0_unimplemented
;
5518 // gen_helper_mfc0_performance5(arg);
5519 rn
= "Performance5";
5520 goto cp0_unimplemented
;
5522 // gen_helper_mfc0_performance6(arg);
5523 rn
= "Performance6";
5524 goto cp0_unimplemented
;
5526 // gen_helper_mfc0_performance7(arg);
5527 rn
= "Performance7";
5528 goto cp0_unimplemented
;
5530 goto cp0_unimplemented
;
5536 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
5540 goto cp0_unimplemented
;
5546 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5550 goto cp0_unimplemented
;
5560 TCGv_i64 tmp
= tcg_temp_new_i64();
5561 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5562 gen_move_low32(arg
, tmp
);
5563 tcg_temp_free_i64(tmp
);
5571 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5575 goto cp0_unimplemented
;
5584 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5591 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5595 goto cp0_unimplemented
;
5601 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5602 tcg_gen_ext32s_tl(arg
, arg
);
5606 goto cp0_unimplemented
;
5613 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5617 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5618 tcg_gen_ld_tl(arg
, cpu_env
,
5619 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5620 tcg_gen_ext32s_tl(arg
, arg
);
5624 goto cp0_unimplemented
;
5628 goto cp0_unimplemented
;
5630 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
5634 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5635 gen_mfc0_unimplemented(ctx
, arg
);
5638 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5640 const char *rn
= "invalid";
5643 check_insn(ctx
, ISA_MIPS32
);
5645 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5653 gen_helper_mtc0_index(cpu_env
, arg
);
5657 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5658 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5662 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5667 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5677 goto cp0_unimplemented
;
5687 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5688 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5692 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5693 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5697 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5698 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5702 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5703 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5707 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5708 tcg_gen_st_tl(arg
, cpu_env
,
5709 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5713 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5714 tcg_gen_st_tl(arg
, cpu_env
,
5715 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5716 rn
= "VPEScheFBack";
5719 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5720 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5724 goto cp0_unimplemented
;
5730 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5734 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5735 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5739 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5740 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5744 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5745 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5749 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5750 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5754 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5755 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5759 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5760 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5764 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5765 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5769 goto cp0_unimplemented
;
5775 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5781 rn
= "GlobalNumber";
5784 goto cp0_unimplemented
;
5790 gen_helper_mtc0_context(cpu_env
, arg
);
5794 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5795 rn
= "ContextConfig";
5796 goto cp0_unimplemented
;
5798 CP0_CHECK(ctx
->ulri
);
5799 tcg_gen_st_tl(arg
, cpu_env
,
5800 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5804 goto cp0_unimplemented
;
5810 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5814 check_insn(ctx
, ISA_MIPS32R2
);
5815 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5817 ctx
->bstate
= BS_STOP
;
5820 goto cp0_unimplemented
;
5826 gen_helper_mtc0_wired(cpu_env
, arg
);
5830 check_insn(ctx
, ISA_MIPS32R2
);
5831 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5835 check_insn(ctx
, ISA_MIPS32R2
);
5836 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5840 check_insn(ctx
, ISA_MIPS32R2
);
5841 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5845 check_insn(ctx
, ISA_MIPS32R2
);
5846 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5850 check_insn(ctx
, ISA_MIPS32R2
);
5851 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5855 goto cp0_unimplemented
;
5861 check_insn(ctx
, ISA_MIPS32R2
);
5862 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5863 ctx
->bstate
= BS_STOP
;
5867 goto cp0_unimplemented
;
5885 goto cp0_unimplemented
;
5891 gen_helper_mtc0_count(cpu_env
, arg
);
5894 /* 6,7 are implementation dependent */
5896 goto cp0_unimplemented
;
5902 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5906 goto cp0_unimplemented
;
5912 gen_helper_mtc0_compare(cpu_env
, arg
);
5915 /* 6,7 are implementation dependent */
5917 goto cp0_unimplemented
;
5923 save_cpu_state(ctx
, 1);
5924 gen_helper_mtc0_status(cpu_env
, arg
);
5925 /* BS_STOP isn't good enough here, hflags may have changed. */
5926 gen_save_pc(ctx
->pc
+ 4);
5927 ctx
->bstate
= BS_EXCP
;
5931 check_insn(ctx
, ISA_MIPS32R2
);
5932 gen_helper_mtc0_intctl(cpu_env
, arg
);
5933 /* Stop translation as we may have switched the execution mode */
5934 ctx
->bstate
= BS_STOP
;
5938 check_insn(ctx
, ISA_MIPS32R2
);
5939 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5940 /* Stop translation as we may have switched the execution mode */
5941 ctx
->bstate
= BS_STOP
;
5945 check_insn(ctx
, ISA_MIPS32R2
);
5946 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5947 /* Stop translation as we may have switched the execution mode */
5948 ctx
->bstate
= BS_STOP
;
5952 goto cp0_unimplemented
;
5958 save_cpu_state(ctx
, 1);
5959 gen_helper_mtc0_cause(cpu_env
, arg
);
5963 goto cp0_unimplemented
;
5969 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5973 goto cp0_unimplemented
;
5983 check_insn(ctx
, ISA_MIPS32R2
);
5984 gen_helper_mtc0_ebase(cpu_env
, arg
);
5988 goto cp0_unimplemented
;
5994 gen_helper_mtc0_config0(cpu_env
, arg
);
5996 /* Stop translation as we may have switched the execution mode */
5997 ctx
->bstate
= BS_STOP
;
6000 /* ignored, read only */
6004 gen_helper_mtc0_config2(cpu_env
, arg
);
6006 /* Stop translation as we may have switched the execution mode */
6007 ctx
->bstate
= BS_STOP
;
6010 gen_helper_mtc0_config3(cpu_env
, arg
);
6012 /* Stop translation as we may have switched the execution mode */
6013 ctx
->bstate
= BS_STOP
;
6016 gen_helper_mtc0_config4(cpu_env
, arg
);
6018 ctx
->bstate
= BS_STOP
;
6021 gen_helper_mtc0_config5(cpu_env
, arg
);
6023 /* Stop translation as we may have switched the execution mode */
6024 ctx
->bstate
= BS_STOP
;
6026 /* 6,7 are implementation dependent */
6036 rn
= "Invalid config selector";
6037 goto cp0_unimplemented
;
6043 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6047 CP0_CHECK(ctx
->mrp
);
6048 gen_helper_mtc0_maar(cpu_env
, arg
);
6052 CP0_CHECK(ctx
->mrp
);
6053 gen_helper_mtc0_maari(cpu_env
, arg
);
6057 goto cp0_unimplemented
;
6063 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6067 goto cp0_unimplemented
;
6073 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6077 goto cp0_unimplemented
;
6083 #if defined(TARGET_MIPS64)
6084 check_insn(ctx
, ISA_MIPS3
);
6085 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6090 goto cp0_unimplemented
;
6094 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6095 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6098 gen_helper_mtc0_framemask(cpu_env
, arg
);
6102 goto cp0_unimplemented
;
6107 rn
= "Diagnostic"; /* implementation dependent */
6112 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6113 /* BS_STOP isn't good enough here, hflags may have changed. */
6114 gen_save_pc(ctx
->pc
+ 4);
6115 ctx
->bstate
= BS_EXCP
;
6119 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6120 rn
= "TraceControl";
6121 /* Stop translation as we may have switched the execution mode */
6122 ctx
->bstate
= BS_STOP
;
6123 goto cp0_unimplemented
;
6125 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6126 rn
= "TraceControl2";
6127 /* Stop translation as we may have switched the execution mode */
6128 ctx
->bstate
= BS_STOP
;
6129 goto cp0_unimplemented
;
6131 /* Stop translation as we may have switched the execution mode */
6132 ctx
->bstate
= BS_STOP
;
6133 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6134 rn
= "UserTraceData";
6135 /* Stop translation as we may have switched the execution mode */
6136 ctx
->bstate
= BS_STOP
;
6137 goto cp0_unimplemented
;
6139 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6140 /* Stop translation as we may have switched the execution mode */
6141 ctx
->bstate
= BS_STOP
;
6143 goto cp0_unimplemented
;
6145 goto cp0_unimplemented
;
6152 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6156 goto cp0_unimplemented
;
6162 gen_helper_mtc0_performance0(cpu_env
, arg
);
6163 rn
= "Performance0";
6166 // gen_helper_mtc0_performance1(arg);
6167 rn
= "Performance1";
6168 goto cp0_unimplemented
;
6170 // gen_helper_mtc0_performance2(arg);
6171 rn
= "Performance2";
6172 goto cp0_unimplemented
;
6174 // gen_helper_mtc0_performance3(arg);
6175 rn
= "Performance3";
6176 goto cp0_unimplemented
;
6178 // gen_helper_mtc0_performance4(arg);
6179 rn
= "Performance4";
6180 goto cp0_unimplemented
;
6182 // gen_helper_mtc0_performance5(arg);
6183 rn
= "Performance5";
6184 goto cp0_unimplemented
;
6186 // gen_helper_mtc0_performance6(arg);
6187 rn
= "Performance6";
6188 goto cp0_unimplemented
;
6190 // gen_helper_mtc0_performance7(arg);
6191 rn
= "Performance7";
6192 goto cp0_unimplemented
;
6194 goto cp0_unimplemented
;
6200 gen_helper_mtc0_errctl(cpu_env
, arg
);
6201 ctx
->bstate
= BS_STOP
;
6205 goto cp0_unimplemented
;
6215 goto cp0_unimplemented
;
6224 gen_helper_mtc0_taglo(cpu_env
, arg
);
6231 gen_helper_mtc0_datalo(cpu_env
, arg
);
6235 goto cp0_unimplemented
;
6244 gen_helper_mtc0_taghi(cpu_env
, arg
);
6251 gen_helper_mtc0_datahi(cpu_env
, arg
);
6256 goto cp0_unimplemented
;
6262 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6266 goto cp0_unimplemented
;
6273 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6277 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6278 tcg_gen_st_tl(arg
, cpu_env
,
6279 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6283 goto cp0_unimplemented
;
6285 /* Stop translation as we may have switched the execution mode */
6286 ctx
->bstate
= BS_STOP
;
6289 goto cp0_unimplemented
;
6291 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
6293 /* For simplicity assume that all writes can cause interrupts. */
6294 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6296 ctx
->bstate
= BS_STOP
;
6301 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6304 #if defined(TARGET_MIPS64)
6305 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6307 const char *rn
= "invalid";
6310 check_insn(ctx
, ISA_MIPS64
);
6316 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6320 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6321 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6325 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6326 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6330 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6331 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6336 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6340 goto cp0_unimplemented
;
6346 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6347 gen_helper_mfc0_random(arg
, cpu_env
);
6351 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6352 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6356 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6357 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6361 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6362 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6366 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6367 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6371 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6372 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6376 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6377 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6378 rn
= "VPEScheFBack";
6381 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6382 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6386 goto cp0_unimplemented
;
6392 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6396 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6397 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6401 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6402 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6406 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6407 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6411 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6412 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6416 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6417 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6421 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6422 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6426 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6427 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6431 goto cp0_unimplemented
;
6437 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6442 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6443 rn
= "GlobalNumber";
6446 goto cp0_unimplemented
;
6452 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6456 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6457 rn
= "ContextConfig";
6458 goto cp0_unimplemented
;
6460 CP0_CHECK(ctx
->ulri
);
6461 tcg_gen_ld_tl(arg
, cpu_env
,
6462 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6466 goto cp0_unimplemented
;
6472 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6476 check_insn(ctx
, ISA_MIPS32R2
);
6477 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6481 goto cp0_unimplemented
;
6487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6491 check_insn(ctx
, ISA_MIPS32R2
);
6492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6496 check_insn(ctx
, ISA_MIPS32R2
);
6497 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6501 check_insn(ctx
, ISA_MIPS32R2
);
6502 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6506 check_insn(ctx
, ISA_MIPS32R2
);
6507 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6511 check_insn(ctx
, ISA_MIPS32R2
);
6512 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6516 goto cp0_unimplemented
;
6522 check_insn(ctx
, ISA_MIPS32R2
);
6523 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6527 goto cp0_unimplemented
;
6533 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6538 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6543 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6547 goto cp0_unimplemented
;
6553 /* Mark as an IO operation because we read the time. */
6554 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6557 gen_helper_mfc0_count(arg
, cpu_env
);
6558 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6561 /* Break the TB to be able to take timer interrupts immediately
6562 after reading count. */
6563 ctx
->bstate
= BS_STOP
;
6566 /* 6,7 are implementation dependent */
6568 goto cp0_unimplemented
;
6574 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6578 goto cp0_unimplemented
;
6584 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6587 /* 6,7 are implementation dependent */
6589 goto cp0_unimplemented
;
6595 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6599 check_insn(ctx
, ISA_MIPS32R2
);
6600 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6604 check_insn(ctx
, ISA_MIPS32R2
);
6605 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6609 check_insn(ctx
, ISA_MIPS32R2
);
6610 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6614 goto cp0_unimplemented
;
6620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6624 goto cp0_unimplemented
;
6630 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6634 goto cp0_unimplemented
;
6640 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6644 check_insn(ctx
, ISA_MIPS32R2
);
6645 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6649 check_insn(ctx
, ISA_MIPS32R2
);
6650 CP0_CHECK(ctx
->cmgcr
);
6651 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6655 goto cp0_unimplemented
;
6661 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6665 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6669 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6673 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6677 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6681 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6684 /* 6,7 are implementation dependent */
6686 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6690 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6694 goto cp0_unimplemented
;
6700 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6704 CP0_CHECK(ctx
->mrp
);
6705 gen_helper_dmfc0_maar(arg
, cpu_env
);
6709 CP0_CHECK(ctx
->mrp
);
6710 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6714 goto cp0_unimplemented
;
6720 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6724 goto cp0_unimplemented
;
6730 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6734 goto cp0_unimplemented
;
6740 check_insn(ctx
, ISA_MIPS3
);
6741 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6745 goto cp0_unimplemented
;
6749 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6750 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6753 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6757 goto cp0_unimplemented
;
6761 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6762 rn
= "'Diagnostic"; /* implementation dependent */
6767 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6771 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6772 rn
= "TraceControl";
6773 goto cp0_unimplemented
;
6775 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6776 rn
= "TraceControl2";
6777 goto cp0_unimplemented
;
6779 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6780 rn
= "UserTraceData";
6781 goto cp0_unimplemented
;
6783 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6785 goto cp0_unimplemented
;
6787 goto cp0_unimplemented
;
6794 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6798 goto cp0_unimplemented
;
6804 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6805 rn
= "Performance0";
6808 // gen_helper_dmfc0_performance1(arg);
6809 rn
= "Performance1";
6810 goto cp0_unimplemented
;
6812 // gen_helper_dmfc0_performance2(arg);
6813 rn
= "Performance2";
6814 goto cp0_unimplemented
;
6816 // gen_helper_dmfc0_performance3(arg);
6817 rn
= "Performance3";
6818 goto cp0_unimplemented
;
6820 // gen_helper_dmfc0_performance4(arg);
6821 rn
= "Performance4";
6822 goto cp0_unimplemented
;
6824 // gen_helper_dmfc0_performance5(arg);
6825 rn
= "Performance5";
6826 goto cp0_unimplemented
;
6828 // gen_helper_dmfc0_performance6(arg);
6829 rn
= "Performance6";
6830 goto cp0_unimplemented
;
6832 // gen_helper_dmfc0_performance7(arg);
6833 rn
= "Performance7";
6834 goto cp0_unimplemented
;
6836 goto cp0_unimplemented
;
6842 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6846 goto cp0_unimplemented
;
6853 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6857 goto cp0_unimplemented
;
6866 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6873 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6877 goto cp0_unimplemented
;
6886 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6893 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6897 goto cp0_unimplemented
;
6903 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6907 goto cp0_unimplemented
;
6914 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6918 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6919 tcg_gen_ld_tl(arg
, cpu_env
,
6920 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6924 goto cp0_unimplemented
;
6928 goto cp0_unimplemented
;
6930 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
6934 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6935 gen_mfc0_unimplemented(ctx
, arg
);
6938 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6940 const char *rn
= "invalid";
6943 check_insn(ctx
, ISA_MIPS64
);
6945 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6953 gen_helper_mtc0_index(cpu_env
, arg
);
6957 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6958 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6962 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6967 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6977 goto cp0_unimplemented
;
6987 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6988 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6992 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6993 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6997 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6998 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7002 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7003 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7007 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7008 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
7012 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7013 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7014 rn
= "VPEScheFBack";
7017 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7018 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7022 goto cp0_unimplemented
;
7028 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7033 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7037 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7038 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7042 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7043 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7047 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7048 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7052 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7053 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7057 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7058 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7062 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7063 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7067 goto cp0_unimplemented
;
7073 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
7079 rn
= "GlobalNumber";
7082 goto cp0_unimplemented
;
7088 gen_helper_mtc0_context(cpu_env
, arg
);
7092 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7093 rn
= "ContextConfig";
7094 goto cp0_unimplemented
;
7096 CP0_CHECK(ctx
->ulri
);
7097 tcg_gen_st_tl(arg
, cpu_env
,
7098 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7102 goto cp0_unimplemented
;
7108 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7112 check_insn(ctx
, ISA_MIPS32R2
);
7113 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7117 goto cp0_unimplemented
;
7123 gen_helper_mtc0_wired(cpu_env
, arg
);
7127 check_insn(ctx
, ISA_MIPS32R2
);
7128 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7132 check_insn(ctx
, ISA_MIPS32R2
);
7133 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7137 check_insn(ctx
, ISA_MIPS32R2
);
7138 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7142 check_insn(ctx
, ISA_MIPS32R2
);
7143 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7147 check_insn(ctx
, ISA_MIPS32R2
);
7148 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7152 goto cp0_unimplemented
;
7158 check_insn(ctx
, ISA_MIPS32R2
);
7159 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7160 ctx
->bstate
= BS_STOP
;
7164 goto cp0_unimplemented
;
7182 goto cp0_unimplemented
;
7188 gen_helper_mtc0_count(cpu_env
, arg
);
7191 /* 6,7 are implementation dependent */
7193 goto cp0_unimplemented
;
7195 /* Stop translation as we may have switched the execution mode */
7196 ctx
->bstate
= BS_STOP
;
7201 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7205 goto cp0_unimplemented
;
7211 gen_helper_mtc0_compare(cpu_env
, arg
);
7214 /* 6,7 are implementation dependent */
7216 goto cp0_unimplemented
;
7218 /* Stop translation as we may have switched the execution mode */
7219 ctx
->bstate
= BS_STOP
;
7224 save_cpu_state(ctx
, 1);
7225 gen_helper_mtc0_status(cpu_env
, arg
);
7226 /* BS_STOP isn't good enough here, hflags may have changed. */
7227 gen_save_pc(ctx
->pc
+ 4);
7228 ctx
->bstate
= BS_EXCP
;
7232 check_insn(ctx
, ISA_MIPS32R2
);
7233 gen_helper_mtc0_intctl(cpu_env
, arg
);
7234 /* Stop translation as we may have switched the execution mode */
7235 ctx
->bstate
= BS_STOP
;
7239 check_insn(ctx
, ISA_MIPS32R2
);
7240 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7241 /* Stop translation as we may have switched the execution mode */
7242 ctx
->bstate
= BS_STOP
;
7246 check_insn(ctx
, ISA_MIPS32R2
);
7247 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7248 /* Stop translation as we may have switched the execution mode */
7249 ctx
->bstate
= BS_STOP
;
7253 goto cp0_unimplemented
;
7259 save_cpu_state(ctx
, 1);
7260 /* Mark as an IO operation because we may trigger a software
7262 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7265 gen_helper_mtc0_cause(cpu_env
, arg
);
7266 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7269 /* Stop translation as we may have triggered an intetrupt */
7270 ctx
->bstate
= BS_STOP
;
7274 goto cp0_unimplemented
;
7280 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7284 goto cp0_unimplemented
;
7294 check_insn(ctx
, ISA_MIPS32R2
);
7295 gen_helper_mtc0_ebase(cpu_env
, arg
);
7299 goto cp0_unimplemented
;
7305 gen_helper_mtc0_config0(cpu_env
, arg
);
7307 /* Stop translation as we may have switched the execution mode */
7308 ctx
->bstate
= BS_STOP
;
7311 /* ignored, read only */
7315 gen_helper_mtc0_config2(cpu_env
, arg
);
7317 /* Stop translation as we may have switched the execution mode */
7318 ctx
->bstate
= BS_STOP
;
7321 gen_helper_mtc0_config3(cpu_env
, arg
);
7323 /* Stop translation as we may have switched the execution mode */
7324 ctx
->bstate
= BS_STOP
;
7327 /* currently ignored */
7331 gen_helper_mtc0_config5(cpu_env
, arg
);
7333 /* Stop translation as we may have switched the execution mode */
7334 ctx
->bstate
= BS_STOP
;
7336 /* 6,7 are implementation dependent */
7338 rn
= "Invalid config selector";
7339 goto cp0_unimplemented
;
7345 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7349 CP0_CHECK(ctx
->mrp
);
7350 gen_helper_mtc0_maar(cpu_env
, arg
);
7354 CP0_CHECK(ctx
->mrp
);
7355 gen_helper_mtc0_maari(cpu_env
, arg
);
7359 goto cp0_unimplemented
;
7365 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7369 goto cp0_unimplemented
;
7375 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7379 goto cp0_unimplemented
;
7385 check_insn(ctx
, ISA_MIPS3
);
7386 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7390 goto cp0_unimplemented
;
7394 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7395 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7398 gen_helper_mtc0_framemask(cpu_env
, arg
);
7402 goto cp0_unimplemented
;
7407 rn
= "Diagnostic"; /* implementation dependent */
7412 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7413 /* BS_STOP isn't good enough here, hflags may have changed. */
7414 gen_save_pc(ctx
->pc
+ 4);
7415 ctx
->bstate
= BS_EXCP
;
7419 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7420 /* Stop translation as we may have switched the execution mode */
7421 ctx
->bstate
= BS_STOP
;
7422 rn
= "TraceControl";
7423 goto cp0_unimplemented
;
7425 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7426 /* Stop translation as we may have switched the execution mode */
7427 ctx
->bstate
= BS_STOP
;
7428 rn
= "TraceControl2";
7429 goto cp0_unimplemented
;
7431 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7432 /* Stop translation as we may have switched the execution mode */
7433 ctx
->bstate
= BS_STOP
;
7434 rn
= "UserTraceData";
7435 goto cp0_unimplemented
;
7437 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7438 /* Stop translation as we may have switched the execution mode */
7439 ctx
->bstate
= BS_STOP
;
7441 goto cp0_unimplemented
;
7443 goto cp0_unimplemented
;
7450 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7454 goto cp0_unimplemented
;
7460 gen_helper_mtc0_performance0(cpu_env
, arg
);
7461 rn
= "Performance0";
7464 // gen_helper_mtc0_performance1(cpu_env, arg);
7465 rn
= "Performance1";
7466 goto cp0_unimplemented
;
7468 // gen_helper_mtc0_performance2(cpu_env, arg);
7469 rn
= "Performance2";
7470 goto cp0_unimplemented
;
7472 // gen_helper_mtc0_performance3(cpu_env, arg);
7473 rn
= "Performance3";
7474 goto cp0_unimplemented
;
7476 // gen_helper_mtc0_performance4(cpu_env, arg);
7477 rn
= "Performance4";
7478 goto cp0_unimplemented
;
7480 // gen_helper_mtc0_performance5(cpu_env, arg);
7481 rn
= "Performance5";
7482 goto cp0_unimplemented
;
7484 // gen_helper_mtc0_performance6(cpu_env, arg);
7485 rn
= "Performance6";
7486 goto cp0_unimplemented
;
7488 // gen_helper_mtc0_performance7(cpu_env, arg);
7489 rn
= "Performance7";
7490 goto cp0_unimplemented
;
7492 goto cp0_unimplemented
;
7498 gen_helper_mtc0_errctl(cpu_env
, arg
);
7499 ctx
->bstate
= BS_STOP
;
7503 goto cp0_unimplemented
;
7513 goto cp0_unimplemented
;
7522 gen_helper_mtc0_taglo(cpu_env
, arg
);
7529 gen_helper_mtc0_datalo(cpu_env
, arg
);
7533 goto cp0_unimplemented
;
7542 gen_helper_mtc0_taghi(cpu_env
, arg
);
7549 gen_helper_mtc0_datahi(cpu_env
, arg
);
7554 goto cp0_unimplemented
;
7560 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7564 goto cp0_unimplemented
;
7571 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7575 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7576 tcg_gen_st_tl(arg
, cpu_env
,
7577 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7581 goto cp0_unimplemented
;
7583 /* Stop translation as we may have switched the execution mode */
7584 ctx
->bstate
= BS_STOP
;
7587 goto cp0_unimplemented
;
7589 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
7591 /* For simplicity assume that all writes can cause interrupts. */
7592 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7594 ctx
->bstate
= BS_STOP
;
7599 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7601 #endif /* TARGET_MIPS64 */
7603 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7604 int u
, int sel
, int h
)
7606 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7607 TCGv t0
= tcg_temp_local_new();
7609 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7610 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7611 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7612 tcg_gen_movi_tl(t0
, -1);
7613 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7614 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7615 tcg_gen_movi_tl(t0
, -1);
7621 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7624 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7634 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7637 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7640 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7643 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7646 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7649 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7652 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7655 gen_mfc0(ctx
, t0
, rt
, sel
);
7662 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7665 gen_mfc0(ctx
, t0
, rt
, sel
);
7671 gen_helper_mftc0_status(t0
, cpu_env
);
7674 gen_mfc0(ctx
, t0
, rt
, sel
);
7680 gen_helper_mftc0_cause(t0
, cpu_env
);
7690 gen_helper_mftc0_epc(t0
, cpu_env
);
7700 gen_helper_mftc0_ebase(t0
, cpu_env
);
7710 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7720 gen_helper_mftc0_debug(t0
, cpu_env
);
7723 gen_mfc0(ctx
, t0
, rt
, sel
);
7728 gen_mfc0(ctx
, t0
, rt
, sel
);
7730 } else switch (sel
) {
7731 /* GPR registers. */
7733 gen_helper_1e0i(mftgpr
, t0
, rt
);
7735 /* Auxiliary CPU registers */
7739 gen_helper_1e0i(mftlo
, t0
, 0);
7742 gen_helper_1e0i(mfthi
, t0
, 0);
7745 gen_helper_1e0i(mftacx
, t0
, 0);
7748 gen_helper_1e0i(mftlo
, t0
, 1);
7751 gen_helper_1e0i(mfthi
, t0
, 1);
7754 gen_helper_1e0i(mftacx
, t0
, 1);
7757 gen_helper_1e0i(mftlo
, t0
, 2);
7760 gen_helper_1e0i(mfthi
, t0
, 2);
7763 gen_helper_1e0i(mftacx
, t0
, 2);
7766 gen_helper_1e0i(mftlo
, t0
, 3);
7769 gen_helper_1e0i(mfthi
, t0
, 3);
7772 gen_helper_1e0i(mftacx
, t0
, 3);
7775 gen_helper_mftdsp(t0
, cpu_env
);
7781 /* Floating point (COP1). */
7783 /* XXX: For now we support only a single FPU context. */
7785 TCGv_i32 fp0
= tcg_temp_new_i32();
7787 gen_load_fpr32(ctx
, fp0
, rt
);
7788 tcg_gen_ext_i32_tl(t0
, fp0
);
7789 tcg_temp_free_i32(fp0
);
7791 TCGv_i32 fp0
= tcg_temp_new_i32();
7793 gen_load_fpr32h(ctx
, fp0
, rt
);
7794 tcg_gen_ext_i32_tl(t0
, fp0
);
7795 tcg_temp_free_i32(fp0
);
7799 /* XXX: For now we support only a single FPU context. */
7800 gen_helper_1e0i(cfc1
, t0
, rt
);
7802 /* COP2: Not implemented. */
7809 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
7810 gen_store_gpr(t0
, rd
);
7816 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7817 generate_exception_end(ctx
, EXCP_RI
);
7820 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7821 int u
, int sel
, int h
)
7823 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7824 TCGv t0
= tcg_temp_local_new();
7826 gen_load_gpr(t0
, rt
);
7827 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7828 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7829 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7831 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7832 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7839 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7842 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7852 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7855 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7858 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7861 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7864 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7867 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7870 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7873 gen_mtc0(ctx
, t0
, rd
, sel
);
7880 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7883 gen_mtc0(ctx
, t0
, rd
, sel
);
7889 gen_helper_mttc0_status(cpu_env
, t0
);
7892 gen_mtc0(ctx
, t0
, rd
, sel
);
7898 gen_helper_mttc0_cause(cpu_env
, t0
);
7908 gen_helper_mttc0_ebase(cpu_env
, t0
);
7918 gen_helper_mttc0_debug(cpu_env
, t0
);
7921 gen_mtc0(ctx
, t0
, rd
, sel
);
7926 gen_mtc0(ctx
, t0
, rd
, sel
);
7928 } else switch (sel
) {
7929 /* GPR registers. */
7931 gen_helper_0e1i(mttgpr
, t0
, rd
);
7933 /* Auxiliary CPU registers */
7937 gen_helper_0e1i(mttlo
, t0
, 0);
7940 gen_helper_0e1i(mtthi
, t0
, 0);
7943 gen_helper_0e1i(mttacx
, t0
, 0);
7946 gen_helper_0e1i(mttlo
, t0
, 1);
7949 gen_helper_0e1i(mtthi
, t0
, 1);
7952 gen_helper_0e1i(mttacx
, t0
, 1);
7955 gen_helper_0e1i(mttlo
, t0
, 2);
7958 gen_helper_0e1i(mtthi
, t0
, 2);
7961 gen_helper_0e1i(mttacx
, t0
, 2);
7964 gen_helper_0e1i(mttlo
, t0
, 3);
7967 gen_helper_0e1i(mtthi
, t0
, 3);
7970 gen_helper_0e1i(mttacx
, t0
, 3);
7973 gen_helper_mttdsp(cpu_env
, t0
);
7979 /* Floating point (COP1). */
7981 /* XXX: For now we support only a single FPU context. */
7983 TCGv_i32 fp0
= tcg_temp_new_i32();
7985 tcg_gen_trunc_tl_i32(fp0
, t0
);
7986 gen_store_fpr32(ctx
, fp0
, rd
);
7987 tcg_temp_free_i32(fp0
);
7989 TCGv_i32 fp0
= tcg_temp_new_i32();
7991 tcg_gen_trunc_tl_i32(fp0
, t0
);
7992 gen_store_fpr32h(ctx
, fp0
, rd
);
7993 tcg_temp_free_i32(fp0
);
7997 /* XXX: For now we support only a single FPU context. */
7999 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
8001 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8002 tcg_temp_free_i32(fs_tmp
);
8004 /* Stop translation as we may have changed hflags */
8005 ctx
->bstate
= BS_STOP
;
8007 /* COP2: Not implemented. */
8014 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
8020 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8021 generate_exception_end(ctx
, EXCP_RI
);
8024 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
8026 const char *opn
= "ldst";
8028 check_cp0_enabled(ctx
);
8035 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8040 TCGv t0
= tcg_temp_new();
8042 gen_load_gpr(t0
, rt
);
8043 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8048 #if defined(TARGET_MIPS64)
8050 check_insn(ctx
, ISA_MIPS3
);
8055 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8059 check_insn(ctx
, ISA_MIPS3
);
8061 TCGv t0
= tcg_temp_new();
8063 gen_load_gpr(t0
, rt
);
8064 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8076 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8082 TCGv t0
= tcg_temp_new();
8083 gen_load_gpr(t0
, rt
);
8084 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8090 check_insn(ctx
, ASE_MT
);
8095 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8096 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8100 check_insn(ctx
, ASE_MT
);
8101 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8102 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8107 if (!env
->tlb
->helper_tlbwi
)
8109 gen_helper_tlbwi(cpu_env
);
8114 if (!env
->tlb
->helper_tlbinv
) {
8117 gen_helper_tlbinv(cpu_env
);
8118 } /* treat as nop if TLBINV not supported */
8123 if (!env
->tlb
->helper_tlbinvf
) {
8126 gen_helper_tlbinvf(cpu_env
);
8127 } /* treat as nop if TLBINV not supported */
8131 if (!env
->tlb
->helper_tlbwr
)
8133 gen_helper_tlbwr(cpu_env
);
8137 if (!env
->tlb
->helper_tlbp
)
8139 gen_helper_tlbp(cpu_env
);
8143 if (!env
->tlb
->helper_tlbr
)
8145 gen_helper_tlbr(cpu_env
);
8147 case OPC_ERET
: /* OPC_ERETNC */
8148 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8149 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8152 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8153 if (ctx
->opcode
& (1 << bit_shift
)) {
8156 check_insn(ctx
, ISA_MIPS32R5
);
8157 gen_helper_eretnc(cpu_env
);
8161 check_insn(ctx
, ISA_MIPS2
);
8162 gen_helper_eret(cpu_env
);
8164 ctx
->bstate
= BS_EXCP
;
8169 check_insn(ctx
, ISA_MIPS32
);
8170 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8171 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8174 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8176 generate_exception_end(ctx
, EXCP_RI
);
8178 gen_helper_deret(cpu_env
);
8179 ctx
->bstate
= BS_EXCP
;
8184 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8185 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8186 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8189 /* If we get an exception, we want to restart at next instruction */
8191 save_cpu_state(ctx
, 1);
8193 gen_helper_wait(cpu_env
);
8194 ctx
->bstate
= BS_EXCP
;
8199 generate_exception_end(ctx
, EXCP_RI
);
8202 (void)opn
; /* avoid a compiler warning */
8204 #endif /* !CONFIG_USER_ONLY */
8206 /* CP1 Branches (before delay slot) */
8207 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8208 int32_t cc
, int32_t offset
)
8210 target_ulong btarget
;
8211 TCGv_i32 t0
= tcg_temp_new_i32();
8213 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8214 generate_exception_end(ctx
, EXCP_RI
);
8219 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8221 btarget
= ctx
->pc
+ 4 + offset
;
8225 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8226 tcg_gen_not_i32(t0
, t0
);
8227 tcg_gen_andi_i32(t0
, t0
, 1);
8228 tcg_gen_extu_i32_tl(bcond
, t0
);
8231 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8232 tcg_gen_not_i32(t0
, t0
);
8233 tcg_gen_andi_i32(t0
, t0
, 1);
8234 tcg_gen_extu_i32_tl(bcond
, t0
);
8237 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8238 tcg_gen_andi_i32(t0
, t0
, 1);
8239 tcg_gen_extu_i32_tl(bcond
, t0
);
8242 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8243 tcg_gen_andi_i32(t0
, t0
, 1);
8244 tcg_gen_extu_i32_tl(bcond
, t0
);
8246 ctx
->hflags
|= MIPS_HFLAG_BL
;
8250 TCGv_i32 t1
= tcg_temp_new_i32();
8251 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8252 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8253 tcg_gen_nand_i32(t0
, t0
, t1
);
8254 tcg_temp_free_i32(t1
);
8255 tcg_gen_andi_i32(t0
, t0
, 1);
8256 tcg_gen_extu_i32_tl(bcond
, t0
);
8261 TCGv_i32 t1
= tcg_temp_new_i32();
8262 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8263 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8264 tcg_gen_or_i32(t0
, t0
, t1
);
8265 tcg_temp_free_i32(t1
);
8266 tcg_gen_andi_i32(t0
, t0
, 1);
8267 tcg_gen_extu_i32_tl(bcond
, t0
);
8272 TCGv_i32 t1
= tcg_temp_new_i32();
8273 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8274 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8275 tcg_gen_and_i32(t0
, t0
, t1
);
8276 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8277 tcg_gen_and_i32(t0
, t0
, t1
);
8278 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8279 tcg_gen_nand_i32(t0
, t0
, t1
);
8280 tcg_temp_free_i32(t1
);
8281 tcg_gen_andi_i32(t0
, t0
, 1);
8282 tcg_gen_extu_i32_tl(bcond
, t0
);
8287 TCGv_i32 t1
= tcg_temp_new_i32();
8288 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8289 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8290 tcg_gen_or_i32(t0
, t0
, t1
);
8291 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8292 tcg_gen_or_i32(t0
, t0
, t1
);
8293 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8294 tcg_gen_or_i32(t0
, t0
, t1
);
8295 tcg_temp_free_i32(t1
);
8296 tcg_gen_andi_i32(t0
, t0
, 1);
8297 tcg_gen_extu_i32_tl(bcond
, t0
);
8300 ctx
->hflags
|= MIPS_HFLAG_BC
;
8303 MIPS_INVAL("cp1 cond branch");
8304 generate_exception_end(ctx
, EXCP_RI
);
8307 ctx
->btarget
= btarget
;
8308 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8310 tcg_temp_free_i32(t0
);
8313 /* R6 CP1 Branches */
8314 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8315 int32_t ft
, int32_t offset
,
8318 target_ulong btarget
;
8319 TCGv_i64 t0
= tcg_temp_new_i64();
8321 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8322 #ifdef MIPS_DEBUG_DISAS
8323 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8326 generate_exception_end(ctx
, EXCP_RI
);
8330 gen_load_fpr64(ctx
, t0
, ft
);
8331 tcg_gen_andi_i64(t0
, t0
, 1);
8333 btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
8337 tcg_gen_xori_i64(t0
, t0
, 1);
8338 ctx
->hflags
|= MIPS_HFLAG_BC
;
8341 /* t0 already set */
8342 ctx
->hflags
|= MIPS_HFLAG_BC
;
8345 MIPS_INVAL("cp1 cond branch");
8346 generate_exception_end(ctx
, EXCP_RI
);
8350 tcg_gen_trunc_i64_tl(bcond
, t0
);
8352 ctx
->btarget
= btarget
;
8354 switch (delayslot_size
) {
8356 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8359 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8364 tcg_temp_free_i64(t0
);
8367 /* Coprocessor 1 (FPU) */
8369 #define FOP(func, fmt) (((fmt) << 21) | (func))
8372 OPC_ADD_S
= FOP(0, FMT_S
),
8373 OPC_SUB_S
= FOP(1, FMT_S
),
8374 OPC_MUL_S
= FOP(2, FMT_S
),
8375 OPC_DIV_S
= FOP(3, FMT_S
),
8376 OPC_SQRT_S
= FOP(4, FMT_S
),
8377 OPC_ABS_S
= FOP(5, FMT_S
),
8378 OPC_MOV_S
= FOP(6, FMT_S
),
8379 OPC_NEG_S
= FOP(7, FMT_S
),
8380 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8381 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8382 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8383 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8384 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8385 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8386 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8387 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8388 OPC_SEL_S
= FOP(16, FMT_S
),
8389 OPC_MOVCF_S
= FOP(17, FMT_S
),
8390 OPC_MOVZ_S
= FOP(18, FMT_S
),
8391 OPC_MOVN_S
= FOP(19, FMT_S
),
8392 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8393 OPC_RECIP_S
= FOP(21, FMT_S
),
8394 OPC_RSQRT_S
= FOP(22, FMT_S
),
8395 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8396 OPC_MADDF_S
= FOP(24, FMT_S
),
8397 OPC_MSUBF_S
= FOP(25, FMT_S
),
8398 OPC_RINT_S
= FOP(26, FMT_S
),
8399 OPC_CLASS_S
= FOP(27, FMT_S
),
8400 OPC_MIN_S
= FOP(28, FMT_S
),
8401 OPC_RECIP2_S
= FOP(28, FMT_S
),
8402 OPC_MINA_S
= FOP(29, FMT_S
),
8403 OPC_RECIP1_S
= FOP(29, FMT_S
),
8404 OPC_MAX_S
= FOP(30, FMT_S
),
8405 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8406 OPC_MAXA_S
= FOP(31, FMT_S
),
8407 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8408 OPC_CVT_D_S
= FOP(33, FMT_S
),
8409 OPC_CVT_W_S
= FOP(36, FMT_S
),
8410 OPC_CVT_L_S
= FOP(37, FMT_S
),
8411 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8412 OPC_CMP_F_S
= FOP (48, FMT_S
),
8413 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8414 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8415 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8416 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8417 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8418 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8419 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8420 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8421 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8422 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8423 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8424 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8425 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8426 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8427 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8429 OPC_ADD_D
= FOP(0, FMT_D
),
8430 OPC_SUB_D
= FOP(1, FMT_D
),
8431 OPC_MUL_D
= FOP(2, FMT_D
),
8432 OPC_DIV_D
= FOP(3, FMT_D
),
8433 OPC_SQRT_D
= FOP(4, FMT_D
),
8434 OPC_ABS_D
= FOP(5, FMT_D
),
8435 OPC_MOV_D
= FOP(6, FMT_D
),
8436 OPC_NEG_D
= FOP(7, FMT_D
),
8437 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8438 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8439 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8440 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8441 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8442 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8443 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8444 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8445 OPC_SEL_D
= FOP(16, FMT_D
),
8446 OPC_MOVCF_D
= FOP(17, FMT_D
),
8447 OPC_MOVZ_D
= FOP(18, FMT_D
),
8448 OPC_MOVN_D
= FOP(19, FMT_D
),
8449 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8450 OPC_RECIP_D
= FOP(21, FMT_D
),
8451 OPC_RSQRT_D
= FOP(22, FMT_D
),
8452 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8453 OPC_MADDF_D
= FOP(24, FMT_D
),
8454 OPC_MSUBF_D
= FOP(25, FMT_D
),
8455 OPC_RINT_D
= FOP(26, FMT_D
),
8456 OPC_CLASS_D
= FOP(27, FMT_D
),
8457 OPC_MIN_D
= FOP(28, FMT_D
),
8458 OPC_RECIP2_D
= FOP(28, FMT_D
),
8459 OPC_MINA_D
= FOP(29, FMT_D
),
8460 OPC_RECIP1_D
= FOP(29, FMT_D
),
8461 OPC_MAX_D
= FOP(30, FMT_D
),
8462 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8463 OPC_MAXA_D
= FOP(31, FMT_D
),
8464 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8465 OPC_CVT_S_D
= FOP(32, FMT_D
),
8466 OPC_CVT_W_D
= FOP(36, FMT_D
),
8467 OPC_CVT_L_D
= FOP(37, FMT_D
),
8468 OPC_CMP_F_D
= FOP (48, FMT_D
),
8469 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8470 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8471 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8472 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8473 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8474 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8475 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8476 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8477 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8478 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8479 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8480 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8481 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8482 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8483 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8485 OPC_CVT_S_W
= FOP(32, FMT_W
),
8486 OPC_CVT_D_W
= FOP(33, FMT_W
),
8487 OPC_CVT_S_L
= FOP(32, FMT_L
),
8488 OPC_CVT_D_L
= FOP(33, FMT_L
),
8489 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8491 OPC_ADD_PS
= FOP(0, FMT_PS
),
8492 OPC_SUB_PS
= FOP(1, FMT_PS
),
8493 OPC_MUL_PS
= FOP(2, FMT_PS
),
8494 OPC_DIV_PS
= FOP(3, FMT_PS
),
8495 OPC_ABS_PS
= FOP(5, FMT_PS
),
8496 OPC_MOV_PS
= FOP(6, FMT_PS
),
8497 OPC_NEG_PS
= FOP(7, FMT_PS
),
8498 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8499 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8500 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8501 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8502 OPC_MULR_PS
= FOP(26, FMT_PS
),
8503 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8504 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8505 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8506 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8508 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8509 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8510 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8511 OPC_PLL_PS
= FOP(44, FMT_PS
),
8512 OPC_PLU_PS
= FOP(45, FMT_PS
),
8513 OPC_PUL_PS
= FOP(46, FMT_PS
),
8514 OPC_PUU_PS
= FOP(47, FMT_PS
),
8515 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8516 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8517 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8518 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8519 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8520 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8521 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8522 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8523 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8524 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8525 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8526 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8527 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8528 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8529 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8530 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8534 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8535 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8536 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8537 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8538 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8539 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8540 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8541 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8542 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8543 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8544 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8545 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8546 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8547 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8548 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8549 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8550 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8551 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8552 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8553 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8554 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8555 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8557 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8558 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8559 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8560 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8561 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8562 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8563 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8564 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8565 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8566 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8567 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8568 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8569 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8570 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8571 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8572 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8573 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8574 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8575 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8576 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8577 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8578 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8580 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8582 TCGv t0
= tcg_temp_new();
8587 TCGv_i32 fp0
= tcg_temp_new_i32();
8589 gen_load_fpr32(ctx
, fp0
, fs
);
8590 tcg_gen_ext_i32_tl(t0
, fp0
);
8591 tcg_temp_free_i32(fp0
);
8593 gen_store_gpr(t0
, rt
);
8596 gen_load_gpr(t0
, rt
);
8598 TCGv_i32 fp0
= tcg_temp_new_i32();
8600 tcg_gen_trunc_tl_i32(fp0
, t0
);
8601 gen_store_fpr32(ctx
, fp0
, fs
);
8602 tcg_temp_free_i32(fp0
);
8606 gen_helper_1e0i(cfc1
, t0
, fs
);
8607 gen_store_gpr(t0
, rt
);
8610 gen_load_gpr(t0
, rt
);
8611 save_cpu_state(ctx
, 0);
8613 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8615 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8616 tcg_temp_free_i32(fs_tmp
);
8618 /* Stop translation as we may have changed hflags */
8619 ctx
->bstate
= BS_STOP
;
8621 #if defined(TARGET_MIPS64)
8623 gen_load_fpr64(ctx
, t0
, fs
);
8624 gen_store_gpr(t0
, rt
);
8627 gen_load_gpr(t0
, rt
);
8628 gen_store_fpr64(ctx
, t0
, fs
);
8633 TCGv_i32 fp0
= tcg_temp_new_i32();
8635 gen_load_fpr32h(ctx
, fp0
, fs
);
8636 tcg_gen_ext_i32_tl(t0
, fp0
);
8637 tcg_temp_free_i32(fp0
);
8639 gen_store_gpr(t0
, rt
);
8642 gen_load_gpr(t0
, rt
);
8644 TCGv_i32 fp0
= tcg_temp_new_i32();
8646 tcg_gen_trunc_tl_i32(fp0
, t0
);
8647 gen_store_fpr32h(ctx
, fp0
, fs
);
8648 tcg_temp_free_i32(fp0
);
8652 MIPS_INVAL("cp1 move");
8653 generate_exception_end(ctx
, EXCP_RI
);
8661 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8677 l1
= gen_new_label();
8678 t0
= tcg_temp_new_i32();
8679 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8680 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8681 tcg_temp_free_i32(t0
);
8683 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8685 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8690 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8694 TCGv_i32 t0
= tcg_temp_new_i32();
8695 TCGLabel
*l1
= gen_new_label();
8702 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8703 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8704 gen_load_fpr32(ctx
, t0
, fs
);
8705 gen_store_fpr32(ctx
, t0
, fd
);
8707 tcg_temp_free_i32(t0
);
8710 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8713 TCGv_i32 t0
= tcg_temp_new_i32();
8715 TCGLabel
*l1
= gen_new_label();
8722 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8723 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8724 tcg_temp_free_i32(t0
);
8725 fp0
= tcg_temp_new_i64();
8726 gen_load_fpr64(ctx
, fp0
, fs
);
8727 gen_store_fpr64(ctx
, fp0
, fd
);
8728 tcg_temp_free_i64(fp0
);
8732 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8736 TCGv_i32 t0
= tcg_temp_new_i32();
8737 TCGLabel
*l1
= gen_new_label();
8738 TCGLabel
*l2
= gen_new_label();
8745 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8746 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8747 gen_load_fpr32(ctx
, t0
, fs
);
8748 gen_store_fpr32(ctx
, t0
, fd
);
8751 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8752 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8753 gen_load_fpr32h(ctx
, t0
, fs
);
8754 gen_store_fpr32h(ctx
, t0
, fd
);
8755 tcg_temp_free_i32(t0
);
8759 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8762 TCGv_i32 t1
= tcg_const_i32(0);
8763 TCGv_i32 fp0
= tcg_temp_new_i32();
8764 TCGv_i32 fp1
= tcg_temp_new_i32();
8765 TCGv_i32 fp2
= tcg_temp_new_i32();
8766 gen_load_fpr32(ctx
, fp0
, fd
);
8767 gen_load_fpr32(ctx
, fp1
, ft
);
8768 gen_load_fpr32(ctx
, fp2
, fs
);
8772 tcg_gen_andi_i32(fp0
, fp0
, 1);
8773 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8776 tcg_gen_andi_i32(fp1
, fp1
, 1);
8777 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8780 tcg_gen_andi_i32(fp1
, fp1
, 1);
8781 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8784 MIPS_INVAL("gen_sel_s");
8785 generate_exception_end(ctx
, EXCP_RI
);
8789 gen_store_fpr32(ctx
, fp0
, fd
);
8790 tcg_temp_free_i32(fp2
);
8791 tcg_temp_free_i32(fp1
);
8792 tcg_temp_free_i32(fp0
);
8793 tcg_temp_free_i32(t1
);
8796 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8799 TCGv_i64 t1
= tcg_const_i64(0);
8800 TCGv_i64 fp0
= tcg_temp_new_i64();
8801 TCGv_i64 fp1
= tcg_temp_new_i64();
8802 TCGv_i64 fp2
= tcg_temp_new_i64();
8803 gen_load_fpr64(ctx
, fp0
, fd
);
8804 gen_load_fpr64(ctx
, fp1
, ft
);
8805 gen_load_fpr64(ctx
, fp2
, fs
);
8809 tcg_gen_andi_i64(fp0
, fp0
, 1);
8810 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8813 tcg_gen_andi_i64(fp1
, fp1
, 1);
8814 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8817 tcg_gen_andi_i64(fp1
, fp1
, 1);
8818 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8821 MIPS_INVAL("gen_sel_d");
8822 generate_exception_end(ctx
, EXCP_RI
);
8826 gen_store_fpr64(ctx
, fp0
, fd
);
8827 tcg_temp_free_i64(fp2
);
8828 tcg_temp_free_i64(fp1
);
8829 tcg_temp_free_i64(fp0
);
8830 tcg_temp_free_i64(t1
);
8833 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8834 int ft
, int fs
, int fd
, int cc
)
8836 uint32_t func
= ctx
->opcode
& 0x3f;
8840 TCGv_i32 fp0
= tcg_temp_new_i32();
8841 TCGv_i32 fp1
= tcg_temp_new_i32();
8843 gen_load_fpr32(ctx
, fp0
, fs
);
8844 gen_load_fpr32(ctx
, fp1
, ft
);
8845 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8846 tcg_temp_free_i32(fp1
);
8847 gen_store_fpr32(ctx
, fp0
, fd
);
8848 tcg_temp_free_i32(fp0
);
8853 TCGv_i32 fp0
= tcg_temp_new_i32();
8854 TCGv_i32 fp1
= tcg_temp_new_i32();
8856 gen_load_fpr32(ctx
, fp0
, fs
);
8857 gen_load_fpr32(ctx
, fp1
, ft
);
8858 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8859 tcg_temp_free_i32(fp1
);
8860 gen_store_fpr32(ctx
, fp0
, fd
);
8861 tcg_temp_free_i32(fp0
);
8866 TCGv_i32 fp0
= tcg_temp_new_i32();
8867 TCGv_i32 fp1
= tcg_temp_new_i32();
8869 gen_load_fpr32(ctx
, fp0
, fs
);
8870 gen_load_fpr32(ctx
, fp1
, ft
);
8871 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8872 tcg_temp_free_i32(fp1
);
8873 gen_store_fpr32(ctx
, fp0
, fd
);
8874 tcg_temp_free_i32(fp0
);
8879 TCGv_i32 fp0
= tcg_temp_new_i32();
8880 TCGv_i32 fp1
= tcg_temp_new_i32();
8882 gen_load_fpr32(ctx
, fp0
, fs
);
8883 gen_load_fpr32(ctx
, fp1
, ft
);
8884 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8885 tcg_temp_free_i32(fp1
);
8886 gen_store_fpr32(ctx
, fp0
, fd
);
8887 tcg_temp_free_i32(fp0
);
8892 TCGv_i32 fp0
= tcg_temp_new_i32();
8894 gen_load_fpr32(ctx
, fp0
, fs
);
8895 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8896 gen_store_fpr32(ctx
, fp0
, fd
);
8897 tcg_temp_free_i32(fp0
);
8902 TCGv_i32 fp0
= tcg_temp_new_i32();
8904 gen_load_fpr32(ctx
, fp0
, fs
);
8906 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
8908 gen_helper_float_abs_s(fp0
, fp0
);
8910 gen_store_fpr32(ctx
, fp0
, fd
);
8911 tcg_temp_free_i32(fp0
);
8916 TCGv_i32 fp0
= tcg_temp_new_i32();
8918 gen_load_fpr32(ctx
, fp0
, fs
);
8919 gen_store_fpr32(ctx
, fp0
, fd
);
8920 tcg_temp_free_i32(fp0
);
8925 TCGv_i32 fp0
= tcg_temp_new_i32();
8927 gen_load_fpr32(ctx
, fp0
, fs
);
8929 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
8931 gen_helper_float_chs_s(fp0
, fp0
);
8933 gen_store_fpr32(ctx
, fp0
, fd
);
8934 tcg_temp_free_i32(fp0
);
8938 check_cp1_64bitmode(ctx
);
8940 TCGv_i32 fp32
= tcg_temp_new_i32();
8941 TCGv_i64 fp64
= tcg_temp_new_i64();
8943 gen_load_fpr32(ctx
, fp32
, fs
);
8945 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
8947 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
8949 tcg_temp_free_i32(fp32
);
8950 gen_store_fpr64(ctx
, fp64
, fd
);
8951 tcg_temp_free_i64(fp64
);
8955 check_cp1_64bitmode(ctx
);
8957 TCGv_i32 fp32
= tcg_temp_new_i32();
8958 TCGv_i64 fp64
= tcg_temp_new_i64();
8960 gen_load_fpr32(ctx
, fp32
, fs
);
8962 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
8964 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
8966 tcg_temp_free_i32(fp32
);
8967 gen_store_fpr64(ctx
, fp64
, fd
);
8968 tcg_temp_free_i64(fp64
);
8972 check_cp1_64bitmode(ctx
);
8974 TCGv_i32 fp32
= tcg_temp_new_i32();
8975 TCGv_i64 fp64
= tcg_temp_new_i64();
8977 gen_load_fpr32(ctx
, fp32
, fs
);
8979 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
8981 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
8983 tcg_temp_free_i32(fp32
);
8984 gen_store_fpr64(ctx
, fp64
, fd
);
8985 tcg_temp_free_i64(fp64
);
8989 check_cp1_64bitmode(ctx
);
8991 TCGv_i32 fp32
= tcg_temp_new_i32();
8992 TCGv_i64 fp64
= tcg_temp_new_i64();
8994 gen_load_fpr32(ctx
, fp32
, fs
);
8996 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
8998 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
9000 tcg_temp_free_i32(fp32
);
9001 gen_store_fpr64(ctx
, fp64
, fd
);
9002 tcg_temp_free_i64(fp64
);
9007 TCGv_i32 fp0
= tcg_temp_new_i32();
9009 gen_load_fpr32(ctx
, fp0
, fs
);
9011 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9013 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9015 gen_store_fpr32(ctx
, fp0
, fd
);
9016 tcg_temp_free_i32(fp0
);
9021 TCGv_i32 fp0
= tcg_temp_new_i32();
9023 gen_load_fpr32(ctx
, fp0
, fs
);
9025 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9027 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9029 gen_store_fpr32(ctx
, fp0
, fd
);
9030 tcg_temp_free_i32(fp0
);
9035 TCGv_i32 fp0
= tcg_temp_new_i32();
9037 gen_load_fpr32(ctx
, fp0
, fs
);
9039 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9041 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9043 gen_store_fpr32(ctx
, fp0
, fd
);
9044 tcg_temp_free_i32(fp0
);
9049 TCGv_i32 fp0
= tcg_temp_new_i32();
9051 gen_load_fpr32(ctx
, fp0
, fs
);
9053 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9055 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9057 gen_store_fpr32(ctx
, fp0
, fd
);
9058 tcg_temp_free_i32(fp0
);
9062 check_insn(ctx
, ISA_MIPS32R6
);
9063 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9066 check_insn(ctx
, ISA_MIPS32R6
);
9067 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9070 check_insn(ctx
, ISA_MIPS32R6
);
9071 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9074 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9075 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9078 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9080 TCGLabel
*l1
= gen_new_label();
9084 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9086 fp0
= tcg_temp_new_i32();
9087 gen_load_fpr32(ctx
, fp0
, fs
);
9088 gen_store_fpr32(ctx
, fp0
, fd
);
9089 tcg_temp_free_i32(fp0
);
9094 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9096 TCGLabel
*l1
= gen_new_label();
9100 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9101 fp0
= tcg_temp_new_i32();
9102 gen_load_fpr32(ctx
, fp0
, fs
);
9103 gen_store_fpr32(ctx
, fp0
, fd
);
9104 tcg_temp_free_i32(fp0
);
9111 TCGv_i32 fp0
= tcg_temp_new_i32();
9113 gen_load_fpr32(ctx
, fp0
, fs
);
9114 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9115 gen_store_fpr32(ctx
, fp0
, fd
);
9116 tcg_temp_free_i32(fp0
);
9121 TCGv_i32 fp0
= tcg_temp_new_i32();
9123 gen_load_fpr32(ctx
, fp0
, fs
);
9124 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9125 gen_store_fpr32(ctx
, fp0
, fd
);
9126 tcg_temp_free_i32(fp0
);
9130 check_insn(ctx
, ISA_MIPS32R6
);
9132 TCGv_i32 fp0
= tcg_temp_new_i32();
9133 TCGv_i32 fp1
= tcg_temp_new_i32();
9134 TCGv_i32 fp2
= tcg_temp_new_i32();
9135 gen_load_fpr32(ctx
, fp0
, fs
);
9136 gen_load_fpr32(ctx
, fp1
, ft
);
9137 gen_load_fpr32(ctx
, fp2
, fd
);
9138 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9139 gen_store_fpr32(ctx
, fp2
, fd
);
9140 tcg_temp_free_i32(fp2
);
9141 tcg_temp_free_i32(fp1
);
9142 tcg_temp_free_i32(fp0
);
9146 check_insn(ctx
, ISA_MIPS32R6
);
9148 TCGv_i32 fp0
= tcg_temp_new_i32();
9149 TCGv_i32 fp1
= tcg_temp_new_i32();
9150 TCGv_i32 fp2
= tcg_temp_new_i32();
9151 gen_load_fpr32(ctx
, fp0
, fs
);
9152 gen_load_fpr32(ctx
, fp1
, ft
);
9153 gen_load_fpr32(ctx
, fp2
, fd
);
9154 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9155 gen_store_fpr32(ctx
, fp2
, fd
);
9156 tcg_temp_free_i32(fp2
);
9157 tcg_temp_free_i32(fp1
);
9158 tcg_temp_free_i32(fp0
);
9162 check_insn(ctx
, ISA_MIPS32R6
);
9164 TCGv_i32 fp0
= tcg_temp_new_i32();
9165 gen_load_fpr32(ctx
, fp0
, fs
);
9166 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9167 gen_store_fpr32(ctx
, fp0
, fd
);
9168 tcg_temp_free_i32(fp0
);
9172 check_insn(ctx
, ISA_MIPS32R6
);
9174 TCGv_i32 fp0
= tcg_temp_new_i32();
9175 gen_load_fpr32(ctx
, fp0
, fs
);
9176 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9177 gen_store_fpr32(ctx
, fp0
, fd
);
9178 tcg_temp_free_i32(fp0
);
9181 case OPC_MIN_S
: /* OPC_RECIP2_S */
9182 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9184 TCGv_i32 fp0
= tcg_temp_new_i32();
9185 TCGv_i32 fp1
= tcg_temp_new_i32();
9186 TCGv_i32 fp2
= tcg_temp_new_i32();
9187 gen_load_fpr32(ctx
, fp0
, fs
);
9188 gen_load_fpr32(ctx
, fp1
, ft
);
9189 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9190 gen_store_fpr32(ctx
, fp2
, fd
);
9191 tcg_temp_free_i32(fp2
);
9192 tcg_temp_free_i32(fp1
);
9193 tcg_temp_free_i32(fp0
);
9196 check_cp1_64bitmode(ctx
);
9198 TCGv_i32 fp0
= tcg_temp_new_i32();
9199 TCGv_i32 fp1
= tcg_temp_new_i32();
9201 gen_load_fpr32(ctx
, fp0
, fs
);
9202 gen_load_fpr32(ctx
, fp1
, ft
);
9203 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9204 tcg_temp_free_i32(fp1
);
9205 gen_store_fpr32(ctx
, fp0
, fd
);
9206 tcg_temp_free_i32(fp0
);
9210 case OPC_MINA_S
: /* OPC_RECIP1_S */
9211 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9213 TCGv_i32 fp0
= tcg_temp_new_i32();
9214 TCGv_i32 fp1
= tcg_temp_new_i32();
9215 TCGv_i32 fp2
= tcg_temp_new_i32();
9216 gen_load_fpr32(ctx
, fp0
, fs
);
9217 gen_load_fpr32(ctx
, fp1
, ft
);
9218 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9219 gen_store_fpr32(ctx
, fp2
, fd
);
9220 tcg_temp_free_i32(fp2
);
9221 tcg_temp_free_i32(fp1
);
9222 tcg_temp_free_i32(fp0
);
9225 check_cp1_64bitmode(ctx
);
9227 TCGv_i32 fp0
= tcg_temp_new_i32();
9229 gen_load_fpr32(ctx
, fp0
, fs
);
9230 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9231 gen_store_fpr32(ctx
, fp0
, fd
);
9232 tcg_temp_free_i32(fp0
);
9236 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9237 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9239 TCGv_i32 fp0
= tcg_temp_new_i32();
9240 TCGv_i32 fp1
= tcg_temp_new_i32();
9241 gen_load_fpr32(ctx
, fp0
, fs
);
9242 gen_load_fpr32(ctx
, fp1
, ft
);
9243 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9244 gen_store_fpr32(ctx
, fp1
, fd
);
9245 tcg_temp_free_i32(fp1
);
9246 tcg_temp_free_i32(fp0
);
9249 check_cp1_64bitmode(ctx
);
9251 TCGv_i32 fp0
= tcg_temp_new_i32();
9253 gen_load_fpr32(ctx
, fp0
, fs
);
9254 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9255 gen_store_fpr32(ctx
, fp0
, fd
);
9256 tcg_temp_free_i32(fp0
);
9260 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9261 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9263 TCGv_i32 fp0
= tcg_temp_new_i32();
9264 TCGv_i32 fp1
= tcg_temp_new_i32();
9265 gen_load_fpr32(ctx
, fp0
, fs
);
9266 gen_load_fpr32(ctx
, fp1
, ft
);
9267 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9268 gen_store_fpr32(ctx
, fp1
, fd
);
9269 tcg_temp_free_i32(fp1
);
9270 tcg_temp_free_i32(fp0
);
9273 check_cp1_64bitmode(ctx
);
9275 TCGv_i32 fp0
= tcg_temp_new_i32();
9276 TCGv_i32 fp1
= tcg_temp_new_i32();
9278 gen_load_fpr32(ctx
, fp0
, fs
);
9279 gen_load_fpr32(ctx
, fp1
, ft
);
9280 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9281 tcg_temp_free_i32(fp1
);
9282 gen_store_fpr32(ctx
, fp0
, fd
);
9283 tcg_temp_free_i32(fp0
);
9288 check_cp1_registers(ctx
, fd
);
9290 TCGv_i32 fp32
= tcg_temp_new_i32();
9291 TCGv_i64 fp64
= tcg_temp_new_i64();
9293 gen_load_fpr32(ctx
, fp32
, fs
);
9294 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9295 tcg_temp_free_i32(fp32
);
9296 gen_store_fpr64(ctx
, fp64
, fd
);
9297 tcg_temp_free_i64(fp64
);
9302 TCGv_i32 fp0
= tcg_temp_new_i32();
9304 gen_load_fpr32(ctx
, fp0
, fs
);
9306 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9308 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9310 gen_store_fpr32(ctx
, fp0
, fd
);
9311 tcg_temp_free_i32(fp0
);
9315 check_cp1_64bitmode(ctx
);
9317 TCGv_i32 fp32
= tcg_temp_new_i32();
9318 TCGv_i64 fp64
= tcg_temp_new_i64();
9320 gen_load_fpr32(ctx
, fp32
, fs
);
9322 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9324 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9326 tcg_temp_free_i32(fp32
);
9327 gen_store_fpr64(ctx
, fp64
, fd
);
9328 tcg_temp_free_i64(fp64
);
9334 TCGv_i64 fp64
= tcg_temp_new_i64();
9335 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9336 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9338 gen_load_fpr32(ctx
, fp32_0
, fs
);
9339 gen_load_fpr32(ctx
, fp32_1
, ft
);
9340 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9341 tcg_temp_free_i32(fp32_1
);
9342 tcg_temp_free_i32(fp32_0
);
9343 gen_store_fpr64(ctx
, fp64
, fd
);
9344 tcg_temp_free_i64(fp64
);
9356 case OPC_CMP_NGLE_S
:
9363 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9364 if (ctx
->opcode
& (1 << 6)) {
9365 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9367 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9371 check_cp1_registers(ctx
, fs
| ft
| fd
);
9373 TCGv_i64 fp0
= tcg_temp_new_i64();
9374 TCGv_i64 fp1
= tcg_temp_new_i64();
9376 gen_load_fpr64(ctx
, fp0
, fs
);
9377 gen_load_fpr64(ctx
, fp1
, ft
);
9378 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9379 tcg_temp_free_i64(fp1
);
9380 gen_store_fpr64(ctx
, fp0
, fd
);
9381 tcg_temp_free_i64(fp0
);
9385 check_cp1_registers(ctx
, fs
| ft
| fd
);
9387 TCGv_i64 fp0
= tcg_temp_new_i64();
9388 TCGv_i64 fp1
= tcg_temp_new_i64();
9390 gen_load_fpr64(ctx
, fp0
, fs
);
9391 gen_load_fpr64(ctx
, fp1
, ft
);
9392 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9393 tcg_temp_free_i64(fp1
);
9394 gen_store_fpr64(ctx
, fp0
, fd
);
9395 tcg_temp_free_i64(fp0
);
9399 check_cp1_registers(ctx
, fs
| ft
| fd
);
9401 TCGv_i64 fp0
= tcg_temp_new_i64();
9402 TCGv_i64 fp1
= tcg_temp_new_i64();
9404 gen_load_fpr64(ctx
, fp0
, fs
);
9405 gen_load_fpr64(ctx
, fp1
, ft
);
9406 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9407 tcg_temp_free_i64(fp1
);
9408 gen_store_fpr64(ctx
, fp0
, fd
);
9409 tcg_temp_free_i64(fp0
);
9413 check_cp1_registers(ctx
, fs
| ft
| fd
);
9415 TCGv_i64 fp0
= tcg_temp_new_i64();
9416 TCGv_i64 fp1
= tcg_temp_new_i64();
9418 gen_load_fpr64(ctx
, fp0
, fs
);
9419 gen_load_fpr64(ctx
, fp1
, ft
);
9420 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9421 tcg_temp_free_i64(fp1
);
9422 gen_store_fpr64(ctx
, fp0
, fd
);
9423 tcg_temp_free_i64(fp0
);
9427 check_cp1_registers(ctx
, fs
| fd
);
9429 TCGv_i64 fp0
= tcg_temp_new_i64();
9431 gen_load_fpr64(ctx
, fp0
, fs
);
9432 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9433 gen_store_fpr64(ctx
, fp0
, fd
);
9434 tcg_temp_free_i64(fp0
);
9438 check_cp1_registers(ctx
, fs
| fd
);
9440 TCGv_i64 fp0
= tcg_temp_new_i64();
9442 gen_load_fpr64(ctx
, fp0
, fs
);
9444 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9446 gen_helper_float_abs_d(fp0
, fp0
);
9448 gen_store_fpr64(ctx
, fp0
, fd
);
9449 tcg_temp_free_i64(fp0
);
9453 check_cp1_registers(ctx
, fs
| fd
);
9455 TCGv_i64 fp0
= tcg_temp_new_i64();
9457 gen_load_fpr64(ctx
, fp0
, fs
);
9458 gen_store_fpr64(ctx
, fp0
, fd
);
9459 tcg_temp_free_i64(fp0
);
9463 check_cp1_registers(ctx
, fs
| fd
);
9465 TCGv_i64 fp0
= tcg_temp_new_i64();
9467 gen_load_fpr64(ctx
, fp0
, fs
);
9469 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9471 gen_helper_float_chs_d(fp0
, fp0
);
9473 gen_store_fpr64(ctx
, fp0
, fd
);
9474 tcg_temp_free_i64(fp0
);
9478 check_cp1_64bitmode(ctx
);
9480 TCGv_i64 fp0
= tcg_temp_new_i64();
9482 gen_load_fpr64(ctx
, fp0
, fs
);
9484 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9486 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9488 gen_store_fpr64(ctx
, fp0
, fd
);
9489 tcg_temp_free_i64(fp0
);
9493 check_cp1_64bitmode(ctx
);
9495 TCGv_i64 fp0
= tcg_temp_new_i64();
9497 gen_load_fpr64(ctx
, fp0
, fs
);
9499 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9501 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9503 gen_store_fpr64(ctx
, fp0
, fd
);
9504 tcg_temp_free_i64(fp0
);
9508 check_cp1_64bitmode(ctx
);
9510 TCGv_i64 fp0
= tcg_temp_new_i64();
9512 gen_load_fpr64(ctx
, fp0
, fs
);
9514 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9516 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9518 gen_store_fpr64(ctx
, fp0
, fd
);
9519 tcg_temp_free_i64(fp0
);
9523 check_cp1_64bitmode(ctx
);
9525 TCGv_i64 fp0
= tcg_temp_new_i64();
9527 gen_load_fpr64(ctx
, fp0
, fs
);
9529 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9531 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9533 gen_store_fpr64(ctx
, fp0
, fd
);
9534 tcg_temp_free_i64(fp0
);
9538 check_cp1_registers(ctx
, fs
);
9540 TCGv_i32 fp32
= tcg_temp_new_i32();
9541 TCGv_i64 fp64
= tcg_temp_new_i64();
9543 gen_load_fpr64(ctx
, fp64
, fs
);
9545 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9547 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9549 tcg_temp_free_i64(fp64
);
9550 gen_store_fpr32(ctx
, fp32
, fd
);
9551 tcg_temp_free_i32(fp32
);
9555 check_cp1_registers(ctx
, fs
);
9557 TCGv_i32 fp32
= tcg_temp_new_i32();
9558 TCGv_i64 fp64
= tcg_temp_new_i64();
9560 gen_load_fpr64(ctx
, fp64
, fs
);
9562 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9564 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9566 tcg_temp_free_i64(fp64
);
9567 gen_store_fpr32(ctx
, fp32
, fd
);
9568 tcg_temp_free_i32(fp32
);
9572 check_cp1_registers(ctx
, fs
);
9574 TCGv_i32 fp32
= tcg_temp_new_i32();
9575 TCGv_i64 fp64
= tcg_temp_new_i64();
9577 gen_load_fpr64(ctx
, fp64
, fs
);
9579 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9581 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9583 tcg_temp_free_i64(fp64
);
9584 gen_store_fpr32(ctx
, fp32
, fd
);
9585 tcg_temp_free_i32(fp32
);
9589 check_cp1_registers(ctx
, fs
);
9591 TCGv_i32 fp32
= tcg_temp_new_i32();
9592 TCGv_i64 fp64
= tcg_temp_new_i64();
9594 gen_load_fpr64(ctx
, fp64
, fs
);
9596 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9598 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9600 tcg_temp_free_i64(fp64
);
9601 gen_store_fpr32(ctx
, fp32
, fd
);
9602 tcg_temp_free_i32(fp32
);
9606 check_insn(ctx
, ISA_MIPS32R6
);
9607 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9610 check_insn(ctx
, ISA_MIPS32R6
);
9611 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9614 check_insn(ctx
, ISA_MIPS32R6
);
9615 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9618 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9619 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9622 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9624 TCGLabel
*l1
= gen_new_label();
9628 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9630 fp0
= tcg_temp_new_i64();
9631 gen_load_fpr64(ctx
, fp0
, fs
);
9632 gen_store_fpr64(ctx
, fp0
, fd
);
9633 tcg_temp_free_i64(fp0
);
9638 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9640 TCGLabel
*l1
= gen_new_label();
9644 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9645 fp0
= tcg_temp_new_i64();
9646 gen_load_fpr64(ctx
, fp0
, fs
);
9647 gen_store_fpr64(ctx
, fp0
, fd
);
9648 tcg_temp_free_i64(fp0
);
9654 check_cp1_registers(ctx
, fs
| fd
);
9656 TCGv_i64 fp0
= tcg_temp_new_i64();
9658 gen_load_fpr64(ctx
, fp0
, fs
);
9659 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9660 gen_store_fpr64(ctx
, fp0
, fd
);
9661 tcg_temp_free_i64(fp0
);
9665 check_cp1_registers(ctx
, fs
| fd
);
9667 TCGv_i64 fp0
= tcg_temp_new_i64();
9669 gen_load_fpr64(ctx
, fp0
, fs
);
9670 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9671 gen_store_fpr64(ctx
, fp0
, fd
);
9672 tcg_temp_free_i64(fp0
);
9676 check_insn(ctx
, ISA_MIPS32R6
);
9678 TCGv_i64 fp0
= tcg_temp_new_i64();
9679 TCGv_i64 fp1
= tcg_temp_new_i64();
9680 TCGv_i64 fp2
= tcg_temp_new_i64();
9681 gen_load_fpr64(ctx
, fp0
, fs
);
9682 gen_load_fpr64(ctx
, fp1
, ft
);
9683 gen_load_fpr64(ctx
, fp2
, fd
);
9684 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9685 gen_store_fpr64(ctx
, fp2
, fd
);
9686 tcg_temp_free_i64(fp2
);
9687 tcg_temp_free_i64(fp1
);
9688 tcg_temp_free_i64(fp0
);
9692 check_insn(ctx
, ISA_MIPS32R6
);
9694 TCGv_i64 fp0
= tcg_temp_new_i64();
9695 TCGv_i64 fp1
= tcg_temp_new_i64();
9696 TCGv_i64 fp2
= tcg_temp_new_i64();
9697 gen_load_fpr64(ctx
, fp0
, fs
);
9698 gen_load_fpr64(ctx
, fp1
, ft
);
9699 gen_load_fpr64(ctx
, fp2
, fd
);
9700 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9701 gen_store_fpr64(ctx
, fp2
, fd
);
9702 tcg_temp_free_i64(fp2
);
9703 tcg_temp_free_i64(fp1
);
9704 tcg_temp_free_i64(fp0
);
9708 check_insn(ctx
, ISA_MIPS32R6
);
9710 TCGv_i64 fp0
= tcg_temp_new_i64();
9711 gen_load_fpr64(ctx
, fp0
, fs
);
9712 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9713 gen_store_fpr64(ctx
, fp0
, fd
);
9714 tcg_temp_free_i64(fp0
);
9718 check_insn(ctx
, ISA_MIPS32R6
);
9720 TCGv_i64 fp0
= tcg_temp_new_i64();
9721 gen_load_fpr64(ctx
, fp0
, fs
);
9722 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9723 gen_store_fpr64(ctx
, fp0
, fd
);
9724 tcg_temp_free_i64(fp0
);
9727 case OPC_MIN_D
: /* OPC_RECIP2_D */
9728 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9730 TCGv_i64 fp0
= tcg_temp_new_i64();
9731 TCGv_i64 fp1
= tcg_temp_new_i64();
9732 gen_load_fpr64(ctx
, fp0
, fs
);
9733 gen_load_fpr64(ctx
, fp1
, ft
);
9734 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9735 gen_store_fpr64(ctx
, fp1
, fd
);
9736 tcg_temp_free_i64(fp1
);
9737 tcg_temp_free_i64(fp0
);
9740 check_cp1_64bitmode(ctx
);
9742 TCGv_i64 fp0
= tcg_temp_new_i64();
9743 TCGv_i64 fp1
= tcg_temp_new_i64();
9745 gen_load_fpr64(ctx
, fp0
, fs
);
9746 gen_load_fpr64(ctx
, fp1
, ft
);
9747 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9748 tcg_temp_free_i64(fp1
);
9749 gen_store_fpr64(ctx
, fp0
, fd
);
9750 tcg_temp_free_i64(fp0
);
9754 case OPC_MINA_D
: /* OPC_RECIP1_D */
9755 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9757 TCGv_i64 fp0
= tcg_temp_new_i64();
9758 TCGv_i64 fp1
= tcg_temp_new_i64();
9759 gen_load_fpr64(ctx
, fp0
, fs
);
9760 gen_load_fpr64(ctx
, fp1
, ft
);
9761 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9762 gen_store_fpr64(ctx
, fp1
, fd
);
9763 tcg_temp_free_i64(fp1
);
9764 tcg_temp_free_i64(fp0
);
9767 check_cp1_64bitmode(ctx
);
9769 TCGv_i64 fp0
= tcg_temp_new_i64();
9771 gen_load_fpr64(ctx
, fp0
, fs
);
9772 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9773 gen_store_fpr64(ctx
, fp0
, fd
);
9774 tcg_temp_free_i64(fp0
);
9778 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9779 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9781 TCGv_i64 fp0
= tcg_temp_new_i64();
9782 TCGv_i64 fp1
= tcg_temp_new_i64();
9783 gen_load_fpr64(ctx
, fp0
, fs
);
9784 gen_load_fpr64(ctx
, fp1
, ft
);
9785 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9786 gen_store_fpr64(ctx
, fp1
, fd
);
9787 tcg_temp_free_i64(fp1
);
9788 tcg_temp_free_i64(fp0
);
9791 check_cp1_64bitmode(ctx
);
9793 TCGv_i64 fp0
= tcg_temp_new_i64();
9795 gen_load_fpr64(ctx
, fp0
, fs
);
9796 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9797 gen_store_fpr64(ctx
, fp0
, fd
);
9798 tcg_temp_free_i64(fp0
);
9802 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9803 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9805 TCGv_i64 fp0
= tcg_temp_new_i64();
9806 TCGv_i64 fp1
= tcg_temp_new_i64();
9807 gen_load_fpr64(ctx
, fp0
, fs
);
9808 gen_load_fpr64(ctx
, fp1
, ft
);
9809 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9810 gen_store_fpr64(ctx
, fp1
, fd
);
9811 tcg_temp_free_i64(fp1
);
9812 tcg_temp_free_i64(fp0
);
9815 check_cp1_64bitmode(ctx
);
9817 TCGv_i64 fp0
= tcg_temp_new_i64();
9818 TCGv_i64 fp1
= tcg_temp_new_i64();
9820 gen_load_fpr64(ctx
, fp0
, fs
);
9821 gen_load_fpr64(ctx
, fp1
, ft
);
9822 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9823 tcg_temp_free_i64(fp1
);
9824 gen_store_fpr64(ctx
, fp0
, fd
);
9825 tcg_temp_free_i64(fp0
);
9838 case OPC_CMP_NGLE_D
:
9845 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9846 if (ctx
->opcode
& (1 << 6)) {
9847 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9849 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9853 check_cp1_registers(ctx
, fs
);
9855 TCGv_i32 fp32
= tcg_temp_new_i32();
9856 TCGv_i64 fp64
= tcg_temp_new_i64();
9858 gen_load_fpr64(ctx
, fp64
, fs
);
9859 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9860 tcg_temp_free_i64(fp64
);
9861 gen_store_fpr32(ctx
, fp32
, fd
);
9862 tcg_temp_free_i32(fp32
);
9866 check_cp1_registers(ctx
, fs
);
9868 TCGv_i32 fp32
= tcg_temp_new_i32();
9869 TCGv_i64 fp64
= tcg_temp_new_i64();
9871 gen_load_fpr64(ctx
, fp64
, fs
);
9873 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
9875 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
9877 tcg_temp_free_i64(fp64
);
9878 gen_store_fpr32(ctx
, fp32
, fd
);
9879 tcg_temp_free_i32(fp32
);
9883 check_cp1_64bitmode(ctx
);
9885 TCGv_i64 fp0
= tcg_temp_new_i64();
9887 gen_load_fpr64(ctx
, fp0
, fs
);
9889 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
9891 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
9893 gen_store_fpr64(ctx
, fp0
, fd
);
9894 tcg_temp_free_i64(fp0
);
9899 TCGv_i32 fp0
= tcg_temp_new_i32();
9901 gen_load_fpr32(ctx
, fp0
, fs
);
9902 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9903 gen_store_fpr32(ctx
, fp0
, fd
);
9904 tcg_temp_free_i32(fp0
);
9908 check_cp1_registers(ctx
, fd
);
9910 TCGv_i32 fp32
= tcg_temp_new_i32();
9911 TCGv_i64 fp64
= tcg_temp_new_i64();
9913 gen_load_fpr32(ctx
, fp32
, fs
);
9914 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9915 tcg_temp_free_i32(fp32
);
9916 gen_store_fpr64(ctx
, fp64
, fd
);
9917 tcg_temp_free_i64(fp64
);
9921 check_cp1_64bitmode(ctx
);
9923 TCGv_i32 fp32
= tcg_temp_new_i32();
9924 TCGv_i64 fp64
= tcg_temp_new_i64();
9926 gen_load_fpr64(ctx
, fp64
, fs
);
9927 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9928 tcg_temp_free_i64(fp64
);
9929 gen_store_fpr32(ctx
, fp32
, fd
);
9930 tcg_temp_free_i32(fp32
);
9934 check_cp1_64bitmode(ctx
);
9936 TCGv_i64 fp0
= tcg_temp_new_i64();
9938 gen_load_fpr64(ctx
, fp0
, fs
);
9939 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9940 gen_store_fpr64(ctx
, fp0
, fd
);
9941 tcg_temp_free_i64(fp0
);
9947 TCGv_i64 fp0
= tcg_temp_new_i64();
9949 gen_load_fpr64(ctx
, fp0
, fs
);
9950 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9951 gen_store_fpr64(ctx
, fp0
, fd
);
9952 tcg_temp_free_i64(fp0
);
9958 TCGv_i64 fp0
= tcg_temp_new_i64();
9959 TCGv_i64 fp1
= tcg_temp_new_i64();
9961 gen_load_fpr64(ctx
, fp0
, fs
);
9962 gen_load_fpr64(ctx
, fp1
, ft
);
9963 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9964 tcg_temp_free_i64(fp1
);
9965 gen_store_fpr64(ctx
, fp0
, fd
);
9966 tcg_temp_free_i64(fp0
);
9972 TCGv_i64 fp0
= tcg_temp_new_i64();
9973 TCGv_i64 fp1
= tcg_temp_new_i64();
9975 gen_load_fpr64(ctx
, fp0
, fs
);
9976 gen_load_fpr64(ctx
, fp1
, ft
);
9977 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9978 tcg_temp_free_i64(fp1
);
9979 gen_store_fpr64(ctx
, fp0
, fd
);
9980 tcg_temp_free_i64(fp0
);
9986 TCGv_i64 fp0
= tcg_temp_new_i64();
9987 TCGv_i64 fp1
= tcg_temp_new_i64();
9989 gen_load_fpr64(ctx
, fp0
, fs
);
9990 gen_load_fpr64(ctx
, fp1
, ft
);
9991 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9992 tcg_temp_free_i64(fp1
);
9993 gen_store_fpr64(ctx
, fp0
, fd
);
9994 tcg_temp_free_i64(fp0
);
10000 TCGv_i64 fp0
= tcg_temp_new_i64();
10002 gen_load_fpr64(ctx
, fp0
, fs
);
10003 gen_helper_float_abs_ps(fp0
, fp0
);
10004 gen_store_fpr64(ctx
, fp0
, fd
);
10005 tcg_temp_free_i64(fp0
);
10011 TCGv_i64 fp0
= tcg_temp_new_i64();
10013 gen_load_fpr64(ctx
, fp0
, fs
);
10014 gen_store_fpr64(ctx
, fp0
, fd
);
10015 tcg_temp_free_i64(fp0
);
10021 TCGv_i64 fp0
= tcg_temp_new_i64();
10023 gen_load_fpr64(ctx
, fp0
, fs
);
10024 gen_helper_float_chs_ps(fp0
, fp0
);
10025 gen_store_fpr64(ctx
, fp0
, fd
);
10026 tcg_temp_free_i64(fp0
);
10031 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10036 TCGLabel
*l1
= gen_new_label();
10040 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10041 fp0
= tcg_temp_new_i64();
10042 gen_load_fpr64(ctx
, fp0
, fs
);
10043 gen_store_fpr64(ctx
, fp0
, fd
);
10044 tcg_temp_free_i64(fp0
);
10051 TCGLabel
*l1
= gen_new_label();
10055 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10056 fp0
= tcg_temp_new_i64();
10057 gen_load_fpr64(ctx
, fp0
, fs
);
10058 gen_store_fpr64(ctx
, fp0
, fd
);
10059 tcg_temp_free_i64(fp0
);
10067 TCGv_i64 fp0
= tcg_temp_new_i64();
10068 TCGv_i64 fp1
= tcg_temp_new_i64();
10070 gen_load_fpr64(ctx
, fp0
, ft
);
10071 gen_load_fpr64(ctx
, fp1
, fs
);
10072 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10073 tcg_temp_free_i64(fp1
);
10074 gen_store_fpr64(ctx
, fp0
, fd
);
10075 tcg_temp_free_i64(fp0
);
10081 TCGv_i64 fp0
= tcg_temp_new_i64();
10082 TCGv_i64 fp1
= tcg_temp_new_i64();
10084 gen_load_fpr64(ctx
, fp0
, ft
);
10085 gen_load_fpr64(ctx
, fp1
, fs
);
10086 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10087 tcg_temp_free_i64(fp1
);
10088 gen_store_fpr64(ctx
, fp0
, fd
);
10089 tcg_temp_free_i64(fp0
);
10092 case OPC_RECIP2_PS
:
10095 TCGv_i64 fp0
= tcg_temp_new_i64();
10096 TCGv_i64 fp1
= tcg_temp_new_i64();
10098 gen_load_fpr64(ctx
, fp0
, fs
);
10099 gen_load_fpr64(ctx
, fp1
, ft
);
10100 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10101 tcg_temp_free_i64(fp1
);
10102 gen_store_fpr64(ctx
, fp0
, fd
);
10103 tcg_temp_free_i64(fp0
);
10106 case OPC_RECIP1_PS
:
10109 TCGv_i64 fp0
= tcg_temp_new_i64();
10111 gen_load_fpr64(ctx
, fp0
, fs
);
10112 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10113 gen_store_fpr64(ctx
, fp0
, fd
);
10114 tcg_temp_free_i64(fp0
);
10117 case OPC_RSQRT1_PS
:
10120 TCGv_i64 fp0
= tcg_temp_new_i64();
10122 gen_load_fpr64(ctx
, fp0
, fs
);
10123 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10124 gen_store_fpr64(ctx
, fp0
, fd
);
10125 tcg_temp_free_i64(fp0
);
10128 case OPC_RSQRT2_PS
:
10131 TCGv_i64 fp0
= tcg_temp_new_i64();
10132 TCGv_i64 fp1
= tcg_temp_new_i64();
10134 gen_load_fpr64(ctx
, fp0
, fs
);
10135 gen_load_fpr64(ctx
, fp1
, ft
);
10136 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10137 tcg_temp_free_i64(fp1
);
10138 gen_store_fpr64(ctx
, fp0
, fd
);
10139 tcg_temp_free_i64(fp0
);
10143 check_cp1_64bitmode(ctx
);
10145 TCGv_i32 fp0
= tcg_temp_new_i32();
10147 gen_load_fpr32h(ctx
, fp0
, fs
);
10148 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10149 gen_store_fpr32(ctx
, fp0
, fd
);
10150 tcg_temp_free_i32(fp0
);
10153 case OPC_CVT_PW_PS
:
10156 TCGv_i64 fp0
= tcg_temp_new_i64();
10158 gen_load_fpr64(ctx
, fp0
, fs
);
10159 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10160 gen_store_fpr64(ctx
, fp0
, fd
);
10161 tcg_temp_free_i64(fp0
);
10165 check_cp1_64bitmode(ctx
);
10167 TCGv_i32 fp0
= tcg_temp_new_i32();
10169 gen_load_fpr32(ctx
, fp0
, fs
);
10170 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10171 gen_store_fpr32(ctx
, fp0
, fd
);
10172 tcg_temp_free_i32(fp0
);
10178 TCGv_i32 fp0
= tcg_temp_new_i32();
10179 TCGv_i32 fp1
= tcg_temp_new_i32();
10181 gen_load_fpr32(ctx
, fp0
, fs
);
10182 gen_load_fpr32(ctx
, fp1
, ft
);
10183 gen_store_fpr32h(ctx
, fp0
, fd
);
10184 gen_store_fpr32(ctx
, fp1
, fd
);
10185 tcg_temp_free_i32(fp0
);
10186 tcg_temp_free_i32(fp1
);
10192 TCGv_i32 fp0
= tcg_temp_new_i32();
10193 TCGv_i32 fp1
= tcg_temp_new_i32();
10195 gen_load_fpr32(ctx
, fp0
, fs
);
10196 gen_load_fpr32h(ctx
, fp1
, ft
);
10197 gen_store_fpr32(ctx
, fp1
, fd
);
10198 gen_store_fpr32h(ctx
, fp0
, fd
);
10199 tcg_temp_free_i32(fp0
);
10200 tcg_temp_free_i32(fp1
);
10206 TCGv_i32 fp0
= tcg_temp_new_i32();
10207 TCGv_i32 fp1
= tcg_temp_new_i32();
10209 gen_load_fpr32h(ctx
, fp0
, fs
);
10210 gen_load_fpr32(ctx
, fp1
, ft
);
10211 gen_store_fpr32(ctx
, fp1
, fd
);
10212 gen_store_fpr32h(ctx
, fp0
, fd
);
10213 tcg_temp_free_i32(fp0
);
10214 tcg_temp_free_i32(fp1
);
10220 TCGv_i32 fp0
= tcg_temp_new_i32();
10221 TCGv_i32 fp1
= tcg_temp_new_i32();
10223 gen_load_fpr32h(ctx
, fp0
, fs
);
10224 gen_load_fpr32h(ctx
, fp1
, ft
);
10225 gen_store_fpr32(ctx
, fp1
, fd
);
10226 gen_store_fpr32h(ctx
, fp0
, fd
);
10227 tcg_temp_free_i32(fp0
);
10228 tcg_temp_free_i32(fp1
);
10232 case OPC_CMP_UN_PS
:
10233 case OPC_CMP_EQ_PS
:
10234 case OPC_CMP_UEQ_PS
:
10235 case OPC_CMP_OLT_PS
:
10236 case OPC_CMP_ULT_PS
:
10237 case OPC_CMP_OLE_PS
:
10238 case OPC_CMP_ULE_PS
:
10239 case OPC_CMP_SF_PS
:
10240 case OPC_CMP_NGLE_PS
:
10241 case OPC_CMP_SEQ_PS
:
10242 case OPC_CMP_NGL_PS
:
10243 case OPC_CMP_LT_PS
:
10244 case OPC_CMP_NGE_PS
:
10245 case OPC_CMP_LE_PS
:
10246 case OPC_CMP_NGT_PS
:
10247 if (ctx
->opcode
& (1 << 6)) {
10248 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10250 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10254 MIPS_INVAL("farith");
10255 generate_exception_end(ctx
, EXCP_RI
);
10260 /* Coprocessor 3 (FPU) */
10261 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10262 int fd
, int fs
, int base
, int index
)
10264 TCGv t0
= tcg_temp_new();
10267 gen_load_gpr(t0
, index
);
10268 } else if (index
== 0) {
10269 gen_load_gpr(t0
, base
);
10271 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10273 /* Don't do NOP if destination is zero: we must perform the actual
10279 TCGv_i32 fp0
= tcg_temp_new_i32();
10281 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10282 tcg_gen_trunc_tl_i32(fp0
, t0
);
10283 gen_store_fpr32(ctx
, fp0
, fd
);
10284 tcg_temp_free_i32(fp0
);
10289 check_cp1_registers(ctx
, fd
);
10291 TCGv_i64 fp0
= tcg_temp_new_i64();
10292 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10293 gen_store_fpr64(ctx
, fp0
, fd
);
10294 tcg_temp_free_i64(fp0
);
10298 check_cp1_64bitmode(ctx
);
10299 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10301 TCGv_i64 fp0
= tcg_temp_new_i64();
10303 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10304 gen_store_fpr64(ctx
, fp0
, fd
);
10305 tcg_temp_free_i64(fp0
);
10311 TCGv_i32 fp0
= tcg_temp_new_i32();
10312 gen_load_fpr32(ctx
, fp0
, fs
);
10313 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10314 tcg_temp_free_i32(fp0
);
10319 check_cp1_registers(ctx
, fs
);
10321 TCGv_i64 fp0
= tcg_temp_new_i64();
10322 gen_load_fpr64(ctx
, fp0
, fs
);
10323 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10324 tcg_temp_free_i64(fp0
);
10328 check_cp1_64bitmode(ctx
);
10329 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10331 TCGv_i64 fp0
= tcg_temp_new_i64();
10332 gen_load_fpr64(ctx
, fp0
, fs
);
10333 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10334 tcg_temp_free_i64(fp0
);
10341 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10342 int fd
, int fr
, int fs
, int ft
)
10348 TCGv t0
= tcg_temp_local_new();
10349 TCGv_i32 fp
= tcg_temp_new_i32();
10350 TCGv_i32 fph
= tcg_temp_new_i32();
10351 TCGLabel
*l1
= gen_new_label();
10352 TCGLabel
*l2
= gen_new_label();
10354 gen_load_gpr(t0
, fr
);
10355 tcg_gen_andi_tl(t0
, t0
, 0x7);
10357 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10358 gen_load_fpr32(ctx
, fp
, fs
);
10359 gen_load_fpr32h(ctx
, fph
, fs
);
10360 gen_store_fpr32(ctx
, fp
, fd
);
10361 gen_store_fpr32h(ctx
, fph
, fd
);
10364 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10366 #ifdef TARGET_WORDS_BIGENDIAN
10367 gen_load_fpr32(ctx
, fp
, fs
);
10368 gen_load_fpr32h(ctx
, fph
, ft
);
10369 gen_store_fpr32h(ctx
, fp
, fd
);
10370 gen_store_fpr32(ctx
, fph
, fd
);
10372 gen_load_fpr32h(ctx
, fph
, fs
);
10373 gen_load_fpr32(ctx
, fp
, ft
);
10374 gen_store_fpr32(ctx
, fph
, fd
);
10375 gen_store_fpr32h(ctx
, fp
, fd
);
10378 tcg_temp_free_i32(fp
);
10379 tcg_temp_free_i32(fph
);
10385 TCGv_i32 fp0
= tcg_temp_new_i32();
10386 TCGv_i32 fp1
= tcg_temp_new_i32();
10387 TCGv_i32 fp2
= tcg_temp_new_i32();
10389 gen_load_fpr32(ctx
, fp0
, fs
);
10390 gen_load_fpr32(ctx
, fp1
, ft
);
10391 gen_load_fpr32(ctx
, fp2
, fr
);
10392 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10393 tcg_temp_free_i32(fp0
);
10394 tcg_temp_free_i32(fp1
);
10395 gen_store_fpr32(ctx
, fp2
, fd
);
10396 tcg_temp_free_i32(fp2
);
10401 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10403 TCGv_i64 fp0
= tcg_temp_new_i64();
10404 TCGv_i64 fp1
= tcg_temp_new_i64();
10405 TCGv_i64 fp2
= tcg_temp_new_i64();
10407 gen_load_fpr64(ctx
, fp0
, fs
);
10408 gen_load_fpr64(ctx
, fp1
, ft
);
10409 gen_load_fpr64(ctx
, fp2
, fr
);
10410 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10411 tcg_temp_free_i64(fp0
);
10412 tcg_temp_free_i64(fp1
);
10413 gen_store_fpr64(ctx
, fp2
, fd
);
10414 tcg_temp_free_i64(fp2
);
10420 TCGv_i64 fp0
= tcg_temp_new_i64();
10421 TCGv_i64 fp1
= tcg_temp_new_i64();
10422 TCGv_i64 fp2
= tcg_temp_new_i64();
10424 gen_load_fpr64(ctx
, fp0
, fs
);
10425 gen_load_fpr64(ctx
, fp1
, ft
);
10426 gen_load_fpr64(ctx
, fp2
, fr
);
10427 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10428 tcg_temp_free_i64(fp0
);
10429 tcg_temp_free_i64(fp1
);
10430 gen_store_fpr64(ctx
, fp2
, fd
);
10431 tcg_temp_free_i64(fp2
);
10437 TCGv_i32 fp0
= tcg_temp_new_i32();
10438 TCGv_i32 fp1
= tcg_temp_new_i32();
10439 TCGv_i32 fp2
= tcg_temp_new_i32();
10441 gen_load_fpr32(ctx
, fp0
, fs
);
10442 gen_load_fpr32(ctx
, fp1
, ft
);
10443 gen_load_fpr32(ctx
, fp2
, fr
);
10444 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10445 tcg_temp_free_i32(fp0
);
10446 tcg_temp_free_i32(fp1
);
10447 gen_store_fpr32(ctx
, fp2
, fd
);
10448 tcg_temp_free_i32(fp2
);
10453 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10455 TCGv_i64 fp0
= tcg_temp_new_i64();
10456 TCGv_i64 fp1
= tcg_temp_new_i64();
10457 TCGv_i64 fp2
= tcg_temp_new_i64();
10459 gen_load_fpr64(ctx
, fp0
, fs
);
10460 gen_load_fpr64(ctx
, fp1
, ft
);
10461 gen_load_fpr64(ctx
, fp2
, fr
);
10462 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10463 tcg_temp_free_i64(fp0
);
10464 tcg_temp_free_i64(fp1
);
10465 gen_store_fpr64(ctx
, fp2
, fd
);
10466 tcg_temp_free_i64(fp2
);
10472 TCGv_i64 fp0
= tcg_temp_new_i64();
10473 TCGv_i64 fp1
= tcg_temp_new_i64();
10474 TCGv_i64 fp2
= tcg_temp_new_i64();
10476 gen_load_fpr64(ctx
, fp0
, fs
);
10477 gen_load_fpr64(ctx
, fp1
, ft
);
10478 gen_load_fpr64(ctx
, fp2
, fr
);
10479 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10480 tcg_temp_free_i64(fp0
);
10481 tcg_temp_free_i64(fp1
);
10482 gen_store_fpr64(ctx
, fp2
, fd
);
10483 tcg_temp_free_i64(fp2
);
10489 TCGv_i32 fp0
= tcg_temp_new_i32();
10490 TCGv_i32 fp1
= tcg_temp_new_i32();
10491 TCGv_i32 fp2
= tcg_temp_new_i32();
10493 gen_load_fpr32(ctx
, fp0
, fs
);
10494 gen_load_fpr32(ctx
, fp1
, ft
);
10495 gen_load_fpr32(ctx
, fp2
, fr
);
10496 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10497 tcg_temp_free_i32(fp0
);
10498 tcg_temp_free_i32(fp1
);
10499 gen_store_fpr32(ctx
, fp2
, fd
);
10500 tcg_temp_free_i32(fp2
);
10505 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10507 TCGv_i64 fp0
= tcg_temp_new_i64();
10508 TCGv_i64 fp1
= tcg_temp_new_i64();
10509 TCGv_i64 fp2
= tcg_temp_new_i64();
10511 gen_load_fpr64(ctx
, fp0
, fs
);
10512 gen_load_fpr64(ctx
, fp1
, ft
);
10513 gen_load_fpr64(ctx
, fp2
, fr
);
10514 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10515 tcg_temp_free_i64(fp0
);
10516 tcg_temp_free_i64(fp1
);
10517 gen_store_fpr64(ctx
, fp2
, fd
);
10518 tcg_temp_free_i64(fp2
);
10524 TCGv_i64 fp0
= tcg_temp_new_i64();
10525 TCGv_i64 fp1
= tcg_temp_new_i64();
10526 TCGv_i64 fp2
= tcg_temp_new_i64();
10528 gen_load_fpr64(ctx
, fp0
, fs
);
10529 gen_load_fpr64(ctx
, fp1
, ft
);
10530 gen_load_fpr64(ctx
, fp2
, fr
);
10531 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10532 tcg_temp_free_i64(fp0
);
10533 tcg_temp_free_i64(fp1
);
10534 gen_store_fpr64(ctx
, fp2
, fd
);
10535 tcg_temp_free_i64(fp2
);
10541 TCGv_i32 fp0
= tcg_temp_new_i32();
10542 TCGv_i32 fp1
= tcg_temp_new_i32();
10543 TCGv_i32 fp2
= tcg_temp_new_i32();
10545 gen_load_fpr32(ctx
, fp0
, fs
);
10546 gen_load_fpr32(ctx
, fp1
, ft
);
10547 gen_load_fpr32(ctx
, fp2
, fr
);
10548 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10549 tcg_temp_free_i32(fp0
);
10550 tcg_temp_free_i32(fp1
);
10551 gen_store_fpr32(ctx
, fp2
, fd
);
10552 tcg_temp_free_i32(fp2
);
10557 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10559 TCGv_i64 fp0
= tcg_temp_new_i64();
10560 TCGv_i64 fp1
= tcg_temp_new_i64();
10561 TCGv_i64 fp2
= tcg_temp_new_i64();
10563 gen_load_fpr64(ctx
, fp0
, fs
);
10564 gen_load_fpr64(ctx
, fp1
, ft
);
10565 gen_load_fpr64(ctx
, fp2
, fr
);
10566 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10567 tcg_temp_free_i64(fp0
);
10568 tcg_temp_free_i64(fp1
);
10569 gen_store_fpr64(ctx
, fp2
, fd
);
10570 tcg_temp_free_i64(fp2
);
10576 TCGv_i64 fp0
= tcg_temp_new_i64();
10577 TCGv_i64 fp1
= tcg_temp_new_i64();
10578 TCGv_i64 fp2
= tcg_temp_new_i64();
10580 gen_load_fpr64(ctx
, fp0
, fs
);
10581 gen_load_fpr64(ctx
, fp1
, ft
);
10582 gen_load_fpr64(ctx
, fp2
, fr
);
10583 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10584 tcg_temp_free_i64(fp0
);
10585 tcg_temp_free_i64(fp1
);
10586 gen_store_fpr64(ctx
, fp2
, fd
);
10587 tcg_temp_free_i64(fp2
);
10591 MIPS_INVAL("flt3_arith");
10592 generate_exception_end(ctx
, EXCP_RI
);
10597 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10601 #if !defined(CONFIG_USER_ONLY)
10602 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10603 Therefore only check the ISA in system mode. */
10604 check_insn(ctx
, ISA_MIPS32R2
);
10606 t0
= tcg_temp_new();
10610 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10611 gen_store_gpr(t0
, rt
);
10614 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10615 gen_store_gpr(t0
, rt
);
10618 gen_helper_rdhwr_cc(t0
, cpu_env
);
10619 gen_store_gpr(t0
, rt
);
10622 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10623 gen_store_gpr(t0
, rt
);
10626 check_insn(ctx
, ISA_MIPS32R6
);
10628 /* Performance counter registers are not implemented other than
10629 * control register 0.
10631 generate_exception(ctx
, EXCP_RI
);
10633 gen_helper_rdhwr_performance(t0
, cpu_env
);
10634 gen_store_gpr(t0
, rt
);
10637 check_insn(ctx
, ISA_MIPS32R6
);
10638 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10639 gen_store_gpr(t0
, rt
);
10642 #if defined(CONFIG_USER_ONLY)
10643 tcg_gen_ld_tl(t0
, cpu_env
,
10644 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10645 gen_store_gpr(t0
, rt
);
10648 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10649 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10650 tcg_gen_ld_tl(t0
, cpu_env
,
10651 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10652 gen_store_gpr(t0
, rt
);
10654 generate_exception_end(ctx
, EXCP_RI
);
10658 default: /* Invalid */
10659 MIPS_INVAL("rdhwr");
10660 generate_exception_end(ctx
, EXCP_RI
);
10666 static inline void clear_branch_hflags(DisasContext
*ctx
)
10668 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10669 if (ctx
->bstate
== BS_NONE
) {
10670 save_cpu_state(ctx
, 0);
10672 /* it is not safe to save ctx->hflags as hflags may be changed
10673 in execution time by the instruction in delay / forbidden slot. */
10674 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10678 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10680 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10681 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10682 /* Branches completion */
10683 clear_branch_hflags(ctx
);
10684 ctx
->bstate
= BS_BRANCH
;
10685 /* FIXME: Need to clear can_do_io. */
10686 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10687 case MIPS_HFLAG_FBNSLOT
:
10688 gen_goto_tb(ctx
, 0, ctx
->pc
+ insn_bytes
);
10691 /* unconditional branch */
10692 if (proc_hflags
& MIPS_HFLAG_BX
) {
10693 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10695 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10697 case MIPS_HFLAG_BL
:
10698 /* blikely taken case */
10699 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10701 case MIPS_HFLAG_BC
:
10702 /* Conditional branch */
10704 TCGLabel
*l1
= gen_new_label();
10706 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10707 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
10709 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10712 case MIPS_HFLAG_BR
:
10713 /* unconditional branch to register */
10714 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10715 TCGv t0
= tcg_temp_new();
10716 TCGv_i32 t1
= tcg_temp_new_i32();
10718 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10719 tcg_gen_trunc_tl_i32(t1
, t0
);
10721 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10722 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10723 tcg_gen_or_i32(hflags
, hflags
, t1
);
10724 tcg_temp_free_i32(t1
);
10726 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10728 tcg_gen_mov_tl(cpu_PC
, btarget
);
10730 if (ctx
->singlestep_enabled
) {
10731 save_cpu_state(ctx
, 0);
10732 gen_helper_raise_exception_debug(cpu_env
);
10734 tcg_gen_lookup_and_goto_ptr(cpu_PC
);
10737 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10743 /* Compact Branches */
10744 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10745 int rs
, int rt
, int32_t offset
)
10747 int bcond_compute
= 0;
10748 TCGv t0
= tcg_temp_new();
10749 TCGv t1
= tcg_temp_new();
10750 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10752 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10753 #ifdef MIPS_DEBUG_DISAS
10754 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10757 generate_exception_end(ctx
, EXCP_RI
);
10761 /* Load needed operands and calculate btarget */
10763 /* compact branch */
10764 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10765 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10766 gen_load_gpr(t0
, rs
);
10767 gen_load_gpr(t1
, rt
);
10769 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10770 if (rs
<= rt
&& rs
== 0) {
10771 /* OPC_BEQZALC, OPC_BNEZALC */
10772 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10775 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10776 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10777 gen_load_gpr(t0
, rs
);
10778 gen_load_gpr(t1
, rt
);
10780 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10782 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10783 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10784 if (rs
== 0 || rs
== rt
) {
10785 /* OPC_BLEZALC, OPC_BGEZALC */
10786 /* OPC_BGTZALC, OPC_BLTZALC */
10787 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10789 gen_load_gpr(t0
, rs
);
10790 gen_load_gpr(t1
, rt
);
10792 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10796 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10801 /* OPC_BEQZC, OPC_BNEZC */
10802 gen_load_gpr(t0
, rs
);
10804 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10806 /* OPC_JIC, OPC_JIALC */
10807 TCGv tbase
= tcg_temp_new();
10808 TCGv toffset
= tcg_temp_new();
10810 gen_load_gpr(tbase
, rt
);
10811 tcg_gen_movi_tl(toffset
, offset
);
10812 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10813 tcg_temp_free(tbase
);
10814 tcg_temp_free(toffset
);
10818 MIPS_INVAL("Compact branch/jump");
10819 generate_exception_end(ctx
, EXCP_RI
);
10823 if (bcond_compute
== 0) {
10824 /* Uncoditional compact branch */
10827 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10830 ctx
->hflags
|= MIPS_HFLAG_BR
;
10833 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10836 ctx
->hflags
|= MIPS_HFLAG_B
;
10839 MIPS_INVAL("Compact branch/jump");
10840 generate_exception_end(ctx
, EXCP_RI
);
10844 /* Generating branch here as compact branches don't have delay slot */
10845 gen_branch(ctx
, 4);
10847 /* Conditional compact branch */
10848 TCGLabel
*fs
= gen_new_label();
10849 save_cpu_state(ctx
, 0);
10852 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10853 if (rs
== 0 && rt
!= 0) {
10855 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10856 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10858 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10861 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10864 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10865 if (rs
== 0 && rt
!= 0) {
10867 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10868 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10870 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10873 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10876 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10877 if (rs
== 0 && rt
!= 0) {
10879 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10880 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10882 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10885 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10888 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10889 if (rs
== 0 && rt
!= 0) {
10891 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10892 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10894 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10897 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10900 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10901 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10903 /* OPC_BOVC, OPC_BNVC */
10904 TCGv t2
= tcg_temp_new();
10905 TCGv t3
= tcg_temp_new();
10906 TCGv t4
= tcg_temp_new();
10907 TCGv input_overflow
= tcg_temp_new();
10909 gen_load_gpr(t0
, rs
);
10910 gen_load_gpr(t1
, rt
);
10911 tcg_gen_ext32s_tl(t2
, t0
);
10912 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10913 tcg_gen_ext32s_tl(t3
, t1
);
10914 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10915 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10917 tcg_gen_add_tl(t4
, t2
, t3
);
10918 tcg_gen_ext32s_tl(t4
, t4
);
10919 tcg_gen_xor_tl(t2
, t2
, t3
);
10920 tcg_gen_xor_tl(t3
, t4
, t3
);
10921 tcg_gen_andc_tl(t2
, t3
, t2
);
10922 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10923 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10924 if (opc
== OPC_BOVC
) {
10926 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10929 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10931 tcg_temp_free(input_overflow
);
10935 } else if (rs
< rt
&& rs
== 0) {
10936 /* OPC_BEQZALC, OPC_BNEZALC */
10937 if (opc
== OPC_BEQZALC
) {
10939 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10942 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10945 /* OPC_BEQC, OPC_BNEC */
10946 if (opc
== OPC_BEQC
) {
10948 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10951 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10956 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10959 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10962 MIPS_INVAL("Compact conditional branch/jump");
10963 generate_exception_end(ctx
, EXCP_RI
);
10967 /* Generating branch here as compact branches don't have delay slot */
10968 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10971 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10979 /* ISA extensions (ASEs) */
10980 /* MIPS16 extension to MIPS32 */
10982 /* MIPS16 major opcodes */
10984 M16_OPC_ADDIUSP
= 0x00,
10985 M16_OPC_ADDIUPC
= 0x01,
10987 M16_OPC_JAL
= 0x03,
10988 M16_OPC_BEQZ
= 0x04,
10989 M16_OPC_BNEQZ
= 0x05,
10990 M16_OPC_SHIFT
= 0x06,
10992 M16_OPC_RRIA
= 0x08,
10993 M16_OPC_ADDIU8
= 0x09,
10994 M16_OPC_SLTI
= 0x0a,
10995 M16_OPC_SLTIU
= 0x0b,
10998 M16_OPC_CMPI
= 0x0e,
11002 M16_OPC_LWSP
= 0x12,
11004 M16_OPC_LBU
= 0x14,
11005 M16_OPC_LHU
= 0x15,
11006 M16_OPC_LWPC
= 0x16,
11007 M16_OPC_LWU
= 0x17,
11010 M16_OPC_SWSP
= 0x1a,
11012 M16_OPC_RRR
= 0x1c,
11014 M16_OPC_EXTEND
= 0x1e,
11018 /* I8 funct field */
11037 /* RR funct field */
11071 /* I64 funct field */
11079 I64_DADDIUPC
= 0x6,
11083 /* RR ry field for CNVT */
11085 RR_RY_CNVT_ZEB
= 0x0,
11086 RR_RY_CNVT_ZEH
= 0x1,
11087 RR_RY_CNVT_ZEW
= 0x2,
11088 RR_RY_CNVT_SEB
= 0x4,
11089 RR_RY_CNVT_SEH
= 0x5,
11090 RR_RY_CNVT_SEW
= 0x6,
/* Translate a 3-bit MIPS16 register field into the corresponding index
   in the full 32-entry register file.  */
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
11100 static void gen_mips16_save (DisasContext
*ctx
,
11101 int xsregs
, int aregs
,
11102 int do_ra
, int do_s0
, int do_s1
,
11105 TCGv t0
= tcg_temp_new();
11106 TCGv t1
= tcg_temp_new();
11107 TCGv t2
= tcg_temp_new();
11137 generate_exception_end(ctx
, EXCP_RI
);
11143 gen_base_offset_addr(ctx
, t0
, 29, 12);
11144 gen_load_gpr(t1
, 7);
11145 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11148 gen_base_offset_addr(ctx
, t0
, 29, 8);
11149 gen_load_gpr(t1
, 6);
11150 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11153 gen_base_offset_addr(ctx
, t0
, 29, 4);
11154 gen_load_gpr(t1
, 5);
11155 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11158 gen_base_offset_addr(ctx
, t0
, 29, 0);
11159 gen_load_gpr(t1
, 4);
11160 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11163 gen_load_gpr(t0
, 29);
11165 #define DECR_AND_STORE(reg) do { \
11166 tcg_gen_movi_tl(t2, -4); \
11167 gen_op_addr_add(ctx, t0, t0, t2); \
11168 gen_load_gpr(t1, reg); \
11169 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11173 DECR_AND_STORE(31);
11178 DECR_AND_STORE(30);
11181 DECR_AND_STORE(23);
11184 DECR_AND_STORE(22);
11187 DECR_AND_STORE(21);
11190 DECR_AND_STORE(20);
11193 DECR_AND_STORE(19);
11196 DECR_AND_STORE(18);
11200 DECR_AND_STORE(17);
11203 DECR_AND_STORE(16);
11233 generate_exception_end(ctx
, EXCP_RI
);
11249 #undef DECR_AND_STORE
11251 tcg_gen_movi_tl(t2
, -framesize
);
11252 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11258 static void gen_mips16_restore (DisasContext
*ctx
,
11259 int xsregs
, int aregs
,
11260 int do_ra
, int do_s0
, int do_s1
,
11264 TCGv t0
= tcg_temp_new();
11265 TCGv t1
= tcg_temp_new();
11266 TCGv t2
= tcg_temp_new();
11268 tcg_gen_movi_tl(t2
, framesize
);
11269 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11271 #define DECR_AND_LOAD(reg) do { \
11272 tcg_gen_movi_tl(t2, -4); \
11273 gen_op_addr_add(ctx, t0, t0, t2); \
11274 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11275 gen_store_gpr(t1, reg); \
11339 generate_exception_end(ctx
, EXCP_RI
);
11355 #undef DECR_AND_LOAD
11357 tcg_gen_movi_tl(t2
, framesize
);
11358 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11364 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11365 int is_64_bit
, int extended
)
11369 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11370 generate_exception_end(ctx
, EXCP_RI
);
11374 t0
= tcg_temp_new();
11376 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11377 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11379 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11385 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11388 TCGv_i32 t0
= tcg_const_i32(op
);
11389 TCGv t1
= tcg_temp_new();
11390 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11391 gen_helper_cache(cpu_env
, t1
, t0
);
11394 #if defined(TARGET_MIPS64)
11395 static void decode_i64_mips16 (DisasContext
*ctx
,
11396 int ry
, int funct
, int16_t offset
,
11401 check_insn(ctx
, ISA_MIPS3
);
11402 check_mips_64(ctx
);
11403 offset
= extended
? offset
: offset
<< 3;
11404 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11407 check_insn(ctx
, ISA_MIPS3
);
11408 check_mips_64(ctx
);
11409 offset
= extended
? offset
: offset
<< 3;
11410 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11413 check_insn(ctx
, ISA_MIPS3
);
11414 check_mips_64(ctx
);
11415 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11416 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11419 check_insn(ctx
, ISA_MIPS3
);
11420 check_mips_64(ctx
);
11421 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11422 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11425 check_insn(ctx
, ISA_MIPS3
);
11426 check_mips_64(ctx
);
11427 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11428 generate_exception_end(ctx
, EXCP_RI
);
11430 offset
= extended
? offset
: offset
<< 3;
11431 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11435 check_insn(ctx
, ISA_MIPS3
);
11436 check_mips_64(ctx
);
11437 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11438 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11441 check_insn(ctx
, ISA_MIPS3
);
11442 check_mips_64(ctx
);
11443 offset
= extended
? offset
: offset
<< 2;
11444 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11447 check_insn(ctx
, ISA_MIPS3
);
11448 check_mips_64(ctx
);
11449 offset
= extended
? offset
: offset
<< 2;
11450 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11456 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11458 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11459 int op
, rx
, ry
, funct
, sa
;
11460 int16_t imm
, offset
;
11462 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11463 op
= (ctx
->opcode
>> 11) & 0x1f;
11464 sa
= (ctx
->opcode
>> 22) & 0x1f;
11465 funct
= (ctx
->opcode
>> 8) & 0x7;
11466 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11467 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11468 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11469 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11470 | (ctx
->opcode
& 0x1f));
11472 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11475 case M16_OPC_ADDIUSP
:
11476 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11478 case M16_OPC_ADDIUPC
:
11479 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11482 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11483 /* No delay slot, so just process as a normal instruction */
11486 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11487 /* No delay slot, so just process as a normal instruction */
11489 case M16_OPC_BNEQZ
:
11490 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11491 /* No delay slot, so just process as a normal instruction */
11493 case M16_OPC_SHIFT
:
11494 switch (ctx
->opcode
& 0x3) {
11496 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11499 #if defined(TARGET_MIPS64)
11500 check_mips_64(ctx
);
11501 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11503 generate_exception_end(ctx
, EXCP_RI
);
11507 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11510 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11514 #if defined(TARGET_MIPS64)
11516 check_insn(ctx
, ISA_MIPS3
);
11517 check_mips_64(ctx
);
11518 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11522 imm
= ctx
->opcode
& 0xf;
11523 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11524 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11525 imm
= (int16_t) (imm
<< 1) >> 1;
11526 if ((ctx
->opcode
>> 4) & 0x1) {
11527 #if defined(TARGET_MIPS64)
11528 check_mips_64(ctx
);
11529 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11531 generate_exception_end(ctx
, EXCP_RI
);
11534 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11537 case M16_OPC_ADDIU8
:
11538 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11541 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11543 case M16_OPC_SLTIU
:
11544 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11549 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11552 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11555 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11558 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11561 check_insn(ctx
, ISA_MIPS32
);
11563 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11564 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11565 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11566 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11567 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11568 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11569 | (ctx
->opcode
& 0xf)) << 3;
11571 if (ctx
->opcode
& (1 << 7)) {
11572 gen_mips16_save(ctx
, xsregs
, aregs
,
11573 do_ra
, do_s0
, do_s1
,
11576 gen_mips16_restore(ctx
, xsregs
, aregs
,
11577 do_ra
, do_s0
, do_s1
,
11583 generate_exception_end(ctx
, EXCP_RI
);
11588 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11591 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11593 #if defined(TARGET_MIPS64)
11595 check_insn(ctx
, ISA_MIPS3
);
11596 check_mips_64(ctx
);
11597 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11601 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11604 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11607 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11610 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11613 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11616 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11619 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11621 #if defined(TARGET_MIPS64)
11623 check_insn(ctx
, ISA_MIPS3
);
11624 check_mips_64(ctx
);
11625 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11629 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11632 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11635 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11638 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11640 #if defined(TARGET_MIPS64)
11642 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11646 generate_exception_end(ctx
, EXCP_RI
);
11653 static inline bool is_uhi(int sdbbp_code
)
11655 #ifdef CONFIG_USER_ONLY
11658 return semihosting_enabled() && sdbbp_code
== 1;
11662 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11666 int op
, cnvt_op
, op1
, offset
;
11670 op
= (ctx
->opcode
>> 11) & 0x1f;
11671 sa
= (ctx
->opcode
>> 2) & 0x7;
11672 sa
= sa
== 0 ? 8 : sa
;
11673 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11674 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11675 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11676 op1
= offset
= ctx
->opcode
& 0x1f;
11681 case M16_OPC_ADDIUSP
:
11683 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11685 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11688 case M16_OPC_ADDIUPC
:
11689 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11692 offset
= (ctx
->opcode
& 0x7ff) << 1;
11693 offset
= (int16_t)(offset
<< 4) >> 4;
11694 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11695 /* No delay slot, so just process as a normal instruction */
11698 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11699 offset
= (((ctx
->opcode
& 0x1f) << 21)
11700 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11702 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11703 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11707 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11708 ((int8_t)ctx
->opcode
) << 1, 0);
11709 /* No delay slot, so just process as a normal instruction */
11711 case M16_OPC_BNEQZ
:
11712 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11713 ((int8_t)ctx
->opcode
) << 1, 0);
11714 /* No delay slot, so just process as a normal instruction */
11716 case M16_OPC_SHIFT
:
11717 switch (ctx
->opcode
& 0x3) {
11719 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11722 #if defined(TARGET_MIPS64)
11723 check_insn(ctx
, ISA_MIPS3
);
11724 check_mips_64(ctx
);
11725 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11727 generate_exception_end(ctx
, EXCP_RI
);
11731 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11734 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11738 #if defined(TARGET_MIPS64)
11740 check_insn(ctx
, ISA_MIPS3
);
11741 check_mips_64(ctx
);
11742 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11747 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11749 if ((ctx
->opcode
>> 4) & 1) {
11750 #if defined(TARGET_MIPS64)
11751 check_insn(ctx
, ISA_MIPS3
);
11752 check_mips_64(ctx
);
11753 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11755 generate_exception_end(ctx
, EXCP_RI
);
11758 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11762 case M16_OPC_ADDIU8
:
11764 int16_t imm
= (int8_t) ctx
->opcode
;
11766 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11771 int16_t imm
= (uint8_t) ctx
->opcode
;
11772 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11775 case M16_OPC_SLTIU
:
11777 int16_t imm
= (uint8_t) ctx
->opcode
;
11778 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11785 funct
= (ctx
->opcode
>> 8) & 0x7;
11788 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11789 ((int8_t)ctx
->opcode
) << 1, 0);
11792 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11793 ((int8_t)ctx
->opcode
) << 1, 0);
11796 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11799 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11800 ((int8_t)ctx
->opcode
) << 3);
11803 check_insn(ctx
, ISA_MIPS32
);
11805 int do_ra
= ctx
->opcode
& (1 << 6);
11806 int do_s0
= ctx
->opcode
& (1 << 5);
11807 int do_s1
= ctx
->opcode
& (1 << 4);
11808 int framesize
= ctx
->opcode
& 0xf;
11810 if (framesize
== 0) {
11813 framesize
= framesize
<< 3;
11816 if (ctx
->opcode
& (1 << 7)) {
11817 gen_mips16_save(ctx
, 0, 0,
11818 do_ra
, do_s0
, do_s1
, framesize
);
11820 gen_mips16_restore(ctx
, 0, 0,
11821 do_ra
, do_s0
, do_s1
, framesize
);
11827 int rz
= xlat(ctx
->opcode
& 0x7);
11829 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11830 ((ctx
->opcode
>> 5) & 0x7);
11831 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11835 reg32
= ctx
->opcode
& 0x1f;
11836 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11839 generate_exception_end(ctx
, EXCP_RI
);
11846 int16_t imm
= (uint8_t) ctx
->opcode
;
11848 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11853 int16_t imm
= (uint8_t) ctx
->opcode
;
11854 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11857 #if defined(TARGET_MIPS64)
11859 check_insn(ctx
, ISA_MIPS3
);
11860 check_mips_64(ctx
);
11861 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11865 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11868 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11871 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11874 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11877 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11880 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11883 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11885 #if defined (TARGET_MIPS64)
11887 check_insn(ctx
, ISA_MIPS3
);
11888 check_mips_64(ctx
);
11889 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11893 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11896 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11899 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11902 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11906 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11909 switch (ctx
->opcode
& 0x3) {
11911 mips32_op
= OPC_ADDU
;
11914 mips32_op
= OPC_SUBU
;
11916 #if defined(TARGET_MIPS64)
11918 mips32_op
= OPC_DADDU
;
11919 check_insn(ctx
, ISA_MIPS3
);
11920 check_mips_64(ctx
);
11923 mips32_op
= OPC_DSUBU
;
11924 check_insn(ctx
, ISA_MIPS3
);
11925 check_mips_64(ctx
);
11929 generate_exception_end(ctx
, EXCP_RI
);
11933 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11942 int nd
= (ctx
->opcode
>> 7) & 0x1;
11943 int link
= (ctx
->opcode
>> 6) & 0x1;
11944 int ra
= (ctx
->opcode
>> 5) & 0x1;
11947 check_insn(ctx
, ISA_MIPS32
);
11956 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11961 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11962 gen_helper_do_semihosting(cpu_env
);
11964 /* XXX: not clear which exception should be raised
11965 * when in debug mode...
11967 check_insn(ctx
, ISA_MIPS32
);
11968 generate_exception_end(ctx
, EXCP_DBp
);
11972 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11975 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11978 generate_exception_end(ctx
, EXCP_BREAK
);
11981 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11984 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11987 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11989 #if defined (TARGET_MIPS64)
11991 check_insn(ctx
, ISA_MIPS3
);
11992 check_mips_64(ctx
);
11993 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11997 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
12000 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
12003 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
12006 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
12009 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12012 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12015 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12018 check_insn(ctx
, ISA_MIPS32
);
12020 case RR_RY_CNVT_ZEB
:
12021 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12023 case RR_RY_CNVT_ZEH
:
12024 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12026 case RR_RY_CNVT_SEB
:
12027 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12029 case RR_RY_CNVT_SEH
:
12030 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12032 #if defined (TARGET_MIPS64)
12033 case RR_RY_CNVT_ZEW
:
12034 check_insn(ctx
, ISA_MIPS64
);
12035 check_mips_64(ctx
);
12036 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12038 case RR_RY_CNVT_SEW
:
12039 check_insn(ctx
, ISA_MIPS64
);
12040 check_mips_64(ctx
);
12041 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12045 generate_exception_end(ctx
, EXCP_RI
);
12050 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12052 #if defined (TARGET_MIPS64)
12054 check_insn(ctx
, ISA_MIPS3
);
12055 check_mips_64(ctx
);
12056 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12059 check_insn(ctx
, ISA_MIPS3
);
12060 check_mips_64(ctx
);
12061 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12064 check_insn(ctx
, ISA_MIPS3
);
12065 check_mips_64(ctx
);
12066 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12069 check_insn(ctx
, ISA_MIPS3
);
12070 check_mips_64(ctx
);
12071 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12075 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12078 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12081 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12084 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12086 #if defined (TARGET_MIPS64)
12088 check_insn(ctx
, ISA_MIPS3
);
12089 check_mips_64(ctx
);
12090 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12093 check_insn(ctx
, ISA_MIPS3
);
12094 check_mips_64(ctx
);
12095 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12098 check_insn(ctx
, ISA_MIPS3
);
12099 check_mips_64(ctx
);
12100 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12103 check_insn(ctx
, ISA_MIPS3
);
12104 check_mips_64(ctx
);
12105 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12109 generate_exception_end(ctx
, EXCP_RI
);
12113 case M16_OPC_EXTEND
:
12114 decode_extended_mips16_opc(env
, ctx
);
12117 #if defined(TARGET_MIPS64)
12119 funct
= (ctx
->opcode
>> 8) & 0x7;
12120 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12124 generate_exception_end(ctx
, EXCP_RI
);
12131 /* microMIPS extension to MIPS32/MIPS64 */
12134 * microMIPS32/microMIPS64 major opcodes
12136 * 1. MIPS Architecture for Programmers Volume II-B:
12137 * The microMIPS32 Instruction Set (Revision 3.05)
12139 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12141 * 2. MIPS Architecture For Programmers Volume II-A:
12142 * The MIPS64 Instruction Set (Revision 3.51)
12172 POOL32S
= 0x16, /* MIPS64 */
12173 DADDIU32
= 0x17, /* MIPS64 */
12202 /* 0x29 is reserved */
12215 /* 0x31 is reserved */
12228 SD32
= 0x36, /* MIPS64 */
12229 LD32
= 0x37, /* MIPS64 */
12231 /* 0x39 is reserved */
12247 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12257 /* POOL32A encoding of minor opcode field */
12260 /* These opcodes are distinguished only by bits 9..6; those bits are
12261 * what are recorded below. */
12298 /* The following can be distinguished by their lower 6 bits. */
12308 /* POOL32AXF encoding of minor opcode field extension */
12311 * 1. MIPS Architecture for Programmers Volume II-B:
12312 * The microMIPS32 Instruction Set (Revision 3.05)
12314 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12316 * 2. MIPS Architecture for Programmers VolumeIV-e:
12317 * The MIPS DSP Application-Specific Extension
12318 * to the microMIPS32 Architecture (Revision 2.34)
12320 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12335 /* begin of microMIPS32 DSP */
12337 /* bits 13..12 for 0x01 */
12343 /* bits 13..12 for 0x2a */
12349 /* bits 13..12 for 0x32 */
12353 /* end of microMIPS32 DSP */
12355 /* bits 15..12 for 0x2c */
12372 /* bits 15..12 for 0x34 */
12380 /* bits 15..12 for 0x3c */
12382 JR
= 0x0, /* alias */
12390 /* bits 15..12 for 0x05 */
12394 /* bits 15..12 for 0x0d */
12406 /* bits 15..12 for 0x15 */
12412 /* bits 15..12 for 0x1d */
12416 /* bits 15..12 for 0x2d */
12421 /* bits 15..12 for 0x35 */
12428 /* POOL32B encoding of minor opcode field (bits 15..12) */
12444 /* POOL32C encoding of minor opcode field (bits 15..12) */
12452 /* 0xa is reserved */
12459 /* 0x6 is reserved */
12465 /* POOL32F encoding of minor opcode field (bits 5..0) */
12468 /* These are the bit 7..6 values */
12477 /* These are the bit 8..6 values */
12502 MOVZ_FMT_05
= 0x05,
12536 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12543 /* POOL32Fxf encoding of minor opcode extension field */
12581 /* POOL32I encoding of minor opcode field (bits 25..21) */
12611 /* These overlap and are distinguished by bit16 of the instruction */
12620 /* POOL16A encoding of minor opcode field */
12627 /* POOL16B encoding of minor opcode field */
12634 /* POOL16C encoding of minor opcode field */
12654 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12674 /* POOL16D encoding of minor opcode field */
12681 /* POOL16E encoding of minor opcode field */
/*
 * Map a 3-bit microMIPS register encoding to the architectural GPR
 * number.  Encoding 0/1 select s0/s1 (16/17); 2..7 select v0/v1/a0..a3.
 */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12695 /* Used for 16-bit store instructions. */
/*
 * Map a 3-bit microMIPS register encoding to the architectural GPR
 * number for 16-bit store instructions: encoding 0 selects $zero
 * (store zero) instead of s0; the rest match mmreg().
 */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12703 #define uMIPS_RD(op) ((op >> 7) & 0x7)
12704 #define uMIPS_RS(op) ((op >> 4) & 0x7)
12705 #define uMIPS_RS2(op) uMIPS_RS(op)
12706 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
12707 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12708 #define uMIPS_RS5(op) (op & 0x1f)
12710 /* Signed immediate */
12711 #define SIMM(op, start, width) \
12712 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
12715 /* Zero-extended immediate */
12716 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12718 static void gen_addiur1sp(DisasContext
*ctx
)
12720 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12722 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12725 static void gen_addiur2(DisasContext
*ctx
)
12727 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12728 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12729 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12731 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12734 static void gen_addiusp(DisasContext
*ctx
)
12736 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12739 if (encoded
<= 1) {
12740 decoded
= 256 + encoded
;
12741 } else if (encoded
<= 255) {
12743 } else if (encoded
<= 509) {
12744 decoded
= encoded
- 512;
12746 decoded
= encoded
- 768;
12749 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12752 static void gen_addius5(DisasContext
*ctx
)
12754 int imm
= SIMM(ctx
->opcode
, 1, 4);
12755 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12757 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12760 static void gen_andi16(DisasContext
*ctx
)
12762 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12763 31, 32, 63, 64, 255, 32768, 65535 };
12764 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12765 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12766 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12768 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12771 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12772 int base
, int16_t offset
)
12777 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12778 generate_exception_end(ctx
, EXCP_RI
);
12782 t0
= tcg_temp_new();
12784 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12786 t1
= tcg_const_tl(reglist
);
12787 t2
= tcg_const_i32(ctx
->mem_idx
);
12789 save_cpu_state(ctx
, 1);
12792 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12795 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12797 #ifdef TARGET_MIPS64
12799 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12802 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12808 tcg_temp_free_i32(t2
);
12812 static void gen_pool16c_insn(DisasContext
*ctx
)
12814 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12815 int rs
= mmreg(ctx
->opcode
& 0x7);
12817 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12822 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12828 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12834 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12840 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12847 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12848 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12850 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12859 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12860 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12862 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12869 int reg
= ctx
->opcode
& 0x1f;
12871 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12877 int reg
= ctx
->opcode
& 0x1f;
12878 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12879 /* Let normal delay slot handling in our caller take us
12880 to the branch target. */
12885 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12886 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12890 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12891 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12895 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12899 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12902 generate_exception_end(ctx
, EXCP_BREAK
);
12905 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12906 gen_helper_do_semihosting(cpu_env
);
12908 /* XXX: not clear which exception should be raised
12909 * when in debug mode...
12911 check_insn(ctx
, ISA_MIPS32
);
12912 generate_exception_end(ctx
, EXCP_DBp
);
12915 case JRADDIUSP
+ 0:
12916 case JRADDIUSP
+ 1:
12918 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12919 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12920 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12921 /* Let normal delay slot handling in our caller take us
12922 to the branch target. */
12926 generate_exception_end(ctx
, EXCP_RI
);
12931 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
12934 int rd
, rs
, re
, rt
;
12935 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12936 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12937 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12938 rd
= rd_enc
[enc_dest
];
12939 re
= re_enc
[enc_dest
];
12940 rs
= rs_rt_enc
[enc_rs
];
12941 rt
= rs_rt_enc
[enc_rt
];
12943 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
12945 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
12948 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
12950 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
12954 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12956 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12957 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12959 switch (ctx
->opcode
& 0xf) {
12961 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12964 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12968 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12969 int offset
= extract32(ctx
->opcode
, 4, 4);
12970 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12973 case R6_JRC16
: /* JRCADDIUSP */
12974 if ((ctx
->opcode
>> 4) & 1) {
12976 int imm
= extract32(ctx
->opcode
, 5, 5);
12977 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12978 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12981 int rs
= extract32(ctx
->opcode
, 5, 5);
12982 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12985 case MOVEP
... MOVEP_07
:
12986 case MOVEP_0C
... MOVEP_0F
:
12988 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12989 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12990 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12991 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12995 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12998 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
13002 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
13003 int offset
= extract32(ctx
->opcode
, 4, 4);
13004 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
13007 case JALRC16
: /* BREAK16, SDBBP16 */
13008 switch (ctx
->opcode
& 0x3f) {
13010 case JALRC16
+ 0x20:
13012 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13017 generate_exception(ctx
, EXCP_BREAK
);
13021 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13022 gen_helper_do_semihosting(cpu_env
);
13024 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13025 generate_exception(ctx
, EXCP_RI
);
13027 generate_exception(ctx
, EXCP_DBp
);
13034 generate_exception(ctx
, EXCP_RI
);
13039 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
13041 TCGv t0
= tcg_temp_new();
13042 TCGv t1
= tcg_temp_new();
13044 gen_load_gpr(t0
, base
);
13047 gen_load_gpr(t1
, index
);
13048 tcg_gen_shli_tl(t1
, t1
, 2);
13049 gen_op_addr_add(ctx
, t0
, t1
, t0
);
13052 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13053 gen_store_gpr(t1
, rd
);
13059 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13060 int base
, int16_t offset
)
13064 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13065 generate_exception_end(ctx
, EXCP_RI
);
13069 t0
= tcg_temp_new();
13070 t1
= tcg_temp_new();
13072 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13077 generate_exception_end(ctx
, EXCP_RI
);
13080 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13081 gen_store_gpr(t1
, rd
);
13082 tcg_gen_movi_tl(t1
, 4);
13083 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13084 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13085 gen_store_gpr(t1
, rd
+1);
13088 gen_load_gpr(t1
, rd
);
13089 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13090 tcg_gen_movi_tl(t1
, 4);
13091 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13092 gen_load_gpr(t1
, rd
+1);
13093 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13095 #ifdef TARGET_MIPS64
13098 generate_exception_end(ctx
, EXCP_RI
);
13101 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13102 gen_store_gpr(t1
, rd
);
13103 tcg_gen_movi_tl(t1
, 8);
13104 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13105 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13106 gen_store_gpr(t1
, rd
+1);
13109 gen_load_gpr(t1
, rd
);
13110 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13111 tcg_gen_movi_tl(t1
, 8);
13112 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13113 gen_load_gpr(t1
, rd
+1);
13114 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13122 static void gen_sync(int stype
)
13124 TCGBar tcg_mo
= TCG_BAR_SC
;
13127 case 0x4: /* SYNC_WMB */
13128 tcg_mo
|= TCG_MO_ST_ST
;
13130 case 0x10: /* SYNC_MB */
13131 tcg_mo
|= TCG_MO_ALL
;
13133 case 0x11: /* SYNC_ACQUIRE */
13134 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
13136 case 0x12: /* SYNC_RELEASE */
13137 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
13139 case 0x13: /* SYNC_RMB */
13140 tcg_mo
|= TCG_MO_LD_LD
;
13143 tcg_mo
|= TCG_MO_ALL
;
13147 tcg_gen_mb(tcg_mo
);
13150 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13152 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13153 int minor
= (ctx
->opcode
>> 12) & 0xf;
13154 uint32_t mips32_op
;
13156 switch (extension
) {
13158 mips32_op
= OPC_TEQ
;
13161 mips32_op
= OPC_TGE
;
13164 mips32_op
= OPC_TGEU
;
13167 mips32_op
= OPC_TLT
;
13170 mips32_op
= OPC_TLTU
;
13173 mips32_op
= OPC_TNE
;
13175 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13177 #ifndef CONFIG_USER_ONLY
13180 check_cp0_enabled(ctx
);
13182 /* Treat as NOP. */
13185 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13189 check_cp0_enabled(ctx
);
13191 TCGv t0
= tcg_temp_new();
13193 gen_load_gpr(t0
, rt
);
13194 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13200 switch (minor
& 3) {
13202 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13205 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13208 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13211 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13214 goto pool32axf_invalid
;
13218 switch (minor
& 3) {
13220 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13223 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13226 goto pool32axf_invalid
;
13232 check_insn(ctx
, ISA_MIPS32R6
);
13233 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13236 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13239 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13242 mips32_op
= OPC_CLO
;
13245 mips32_op
= OPC_CLZ
;
13247 check_insn(ctx
, ISA_MIPS32
);
13248 gen_cl(ctx
, mips32_op
, rt
, rs
);
13251 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13252 gen_rdhwr(ctx
, rt
, rs
, 0);
13255 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13258 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13259 mips32_op
= OPC_MULT
;
13262 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13263 mips32_op
= OPC_MULTU
;
13266 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13267 mips32_op
= OPC_DIV
;
13270 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13271 mips32_op
= OPC_DIVU
;
13274 check_insn(ctx
, ISA_MIPS32
);
13275 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13278 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13279 mips32_op
= OPC_MADD
;
13282 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13283 mips32_op
= OPC_MADDU
;
13286 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13287 mips32_op
= OPC_MSUB
;
13290 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13291 mips32_op
= OPC_MSUBU
;
13293 check_insn(ctx
, ISA_MIPS32
);
13294 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13297 goto pool32axf_invalid
;
13308 generate_exception_err(ctx
, EXCP_CpU
, 2);
13311 goto pool32axf_invalid
;
13316 case JALR
: /* JALRC */
13317 case JALR_HB
: /* JALRC_HB */
13318 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13319 /* JALRC, JALRC_HB */
13320 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13322 /* JALR, JALR_HB */
13323 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13324 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13329 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13330 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13331 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13334 goto pool32axf_invalid
;
13340 check_cp0_enabled(ctx
);
13341 check_insn(ctx
, ISA_MIPS32R2
);
13342 gen_load_srsgpr(rs
, rt
);
13345 check_cp0_enabled(ctx
);
13346 check_insn(ctx
, ISA_MIPS32R2
);
13347 gen_store_srsgpr(rs
, rt
);
13350 goto pool32axf_invalid
;
13353 #ifndef CONFIG_USER_ONLY
13357 mips32_op
= OPC_TLBP
;
13360 mips32_op
= OPC_TLBR
;
13363 mips32_op
= OPC_TLBWI
;
13366 mips32_op
= OPC_TLBWR
;
13369 mips32_op
= OPC_TLBINV
;
13372 mips32_op
= OPC_TLBINVF
;
13375 mips32_op
= OPC_WAIT
;
13378 mips32_op
= OPC_DERET
;
13381 mips32_op
= OPC_ERET
;
13383 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13386 goto pool32axf_invalid
;
13392 check_cp0_enabled(ctx
);
13394 TCGv t0
= tcg_temp_new();
13396 save_cpu_state(ctx
, 1);
13397 gen_helper_di(t0
, cpu_env
);
13398 gen_store_gpr(t0
, rs
);
13399 /* Stop translation as we may have switched the execution mode */
13400 ctx
->bstate
= BS_STOP
;
13405 check_cp0_enabled(ctx
);
13407 TCGv t0
= tcg_temp_new();
13409 save_cpu_state(ctx
, 1);
13410 gen_helper_ei(t0
, cpu_env
);
13411 gen_store_gpr(t0
, rs
);
13412 /* Stop translation as we may have switched the execution mode */
13413 ctx
->bstate
= BS_STOP
;
13418 goto pool32axf_invalid
;
13425 gen_sync(extract32(ctx
->opcode
, 16, 5));
13428 generate_exception_end(ctx
, EXCP_SYSCALL
);
13431 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13432 gen_helper_do_semihosting(cpu_env
);
13434 check_insn(ctx
, ISA_MIPS32
);
13435 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13436 generate_exception_end(ctx
, EXCP_RI
);
13438 generate_exception_end(ctx
, EXCP_DBp
);
13443 goto pool32axf_invalid
;
13447 switch (minor
& 3) {
13449 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13452 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13455 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13458 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13461 goto pool32axf_invalid
;
13465 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13468 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13471 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13474 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13477 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13480 goto pool32axf_invalid
;
13485 MIPS_INVAL("pool32axf");
13486 generate_exception_end(ctx
, EXCP_RI
);
13491 /* Values for microMIPS fmt field. Variable-width, depending on which
13492 formats the instruction supports. */
13511 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13513 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13514 uint32_t mips32_op
;
13516 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13517 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13518 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13520 switch (extension
) {
13521 case FLOAT_1BIT_FMT(CFC1
, 0):
13522 mips32_op
= OPC_CFC1
;
13524 case FLOAT_1BIT_FMT(CTC1
, 0):
13525 mips32_op
= OPC_CTC1
;
13527 case FLOAT_1BIT_FMT(MFC1
, 0):
13528 mips32_op
= OPC_MFC1
;
13530 case FLOAT_1BIT_FMT(MTC1
, 0):
13531 mips32_op
= OPC_MTC1
;
13533 case FLOAT_1BIT_FMT(MFHC1
, 0):
13534 mips32_op
= OPC_MFHC1
;
13536 case FLOAT_1BIT_FMT(MTHC1
, 0):
13537 mips32_op
= OPC_MTHC1
;
13539 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13542 /* Reciprocal square root */
13543 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13544 mips32_op
= OPC_RSQRT_S
;
13546 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13547 mips32_op
= OPC_RSQRT_D
;
13551 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13552 mips32_op
= OPC_SQRT_S
;
13554 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13555 mips32_op
= OPC_SQRT_D
;
13559 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13560 mips32_op
= OPC_RECIP_S
;
13562 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13563 mips32_op
= OPC_RECIP_D
;
13567 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13568 mips32_op
= OPC_FLOOR_L_S
;
13570 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13571 mips32_op
= OPC_FLOOR_L_D
;
13573 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13574 mips32_op
= OPC_FLOOR_W_S
;
13576 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13577 mips32_op
= OPC_FLOOR_W_D
;
13581 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13582 mips32_op
= OPC_CEIL_L_S
;
13584 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13585 mips32_op
= OPC_CEIL_L_D
;
13587 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13588 mips32_op
= OPC_CEIL_W_S
;
13590 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13591 mips32_op
= OPC_CEIL_W_D
;
13595 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13596 mips32_op
= OPC_TRUNC_L_S
;
13598 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13599 mips32_op
= OPC_TRUNC_L_D
;
13601 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13602 mips32_op
= OPC_TRUNC_W_S
;
13604 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13605 mips32_op
= OPC_TRUNC_W_D
;
13609 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13610 mips32_op
= OPC_ROUND_L_S
;
13612 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13613 mips32_op
= OPC_ROUND_L_D
;
13615 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13616 mips32_op
= OPC_ROUND_W_S
;
13618 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13619 mips32_op
= OPC_ROUND_W_D
;
13622 /* Integer to floating-point conversion */
13623 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13624 mips32_op
= OPC_CVT_L_S
;
13626 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13627 mips32_op
= OPC_CVT_L_D
;
13629 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13630 mips32_op
= OPC_CVT_W_S
;
13632 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13633 mips32_op
= OPC_CVT_W_D
;
13636 /* Paired-foo conversions */
13637 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13638 mips32_op
= OPC_CVT_S_PL
;
13640 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13641 mips32_op
= OPC_CVT_S_PU
;
13643 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13644 mips32_op
= OPC_CVT_PW_PS
;
13646 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13647 mips32_op
= OPC_CVT_PS_PW
;
13650 /* Floating-point moves */
13651 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13652 mips32_op
= OPC_MOV_S
;
13654 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13655 mips32_op
= OPC_MOV_D
;
13657 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13658 mips32_op
= OPC_MOV_PS
;
13661 /* Absolute value */
13662 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13663 mips32_op
= OPC_ABS_S
;
13665 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13666 mips32_op
= OPC_ABS_D
;
13668 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13669 mips32_op
= OPC_ABS_PS
;
13673 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13674 mips32_op
= OPC_NEG_S
;
13676 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13677 mips32_op
= OPC_NEG_D
;
13679 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13680 mips32_op
= OPC_NEG_PS
;
13683 /* Reciprocal square root step */
13684 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13685 mips32_op
= OPC_RSQRT1_S
;
13687 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13688 mips32_op
= OPC_RSQRT1_D
;
13690 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13691 mips32_op
= OPC_RSQRT1_PS
;
13694 /* Reciprocal step */
13695 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13696 mips32_op
= OPC_RECIP1_S
;
13698 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13699 mips32_op
= OPC_RECIP1_S
;
13701 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13702 mips32_op
= OPC_RECIP1_PS
;
13705 /* Conversions from double */
13706 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13707 mips32_op
= OPC_CVT_D_S
;
13709 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13710 mips32_op
= OPC_CVT_D_W
;
13712 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13713 mips32_op
= OPC_CVT_D_L
;
13716 /* Conversions from single */
13717 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13718 mips32_op
= OPC_CVT_S_D
;
13720 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13721 mips32_op
= OPC_CVT_S_W
;
13723 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13724 mips32_op
= OPC_CVT_S_L
;
13726 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13729 /* Conditional moves on floating-point codes */
13730 case COND_FLOAT_MOV(MOVT
, 0):
13731 case COND_FLOAT_MOV(MOVT
, 1):
13732 case COND_FLOAT_MOV(MOVT
, 2):
13733 case COND_FLOAT_MOV(MOVT
, 3):
13734 case COND_FLOAT_MOV(MOVT
, 4):
13735 case COND_FLOAT_MOV(MOVT
, 5):
13736 case COND_FLOAT_MOV(MOVT
, 6):
13737 case COND_FLOAT_MOV(MOVT
, 7):
13738 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13739 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13741 case COND_FLOAT_MOV(MOVF
, 0):
13742 case COND_FLOAT_MOV(MOVF
, 1):
13743 case COND_FLOAT_MOV(MOVF
, 2):
13744 case COND_FLOAT_MOV(MOVF
, 3):
13745 case COND_FLOAT_MOV(MOVF
, 4):
13746 case COND_FLOAT_MOV(MOVF
, 5):
13747 case COND_FLOAT_MOV(MOVF
, 6):
13748 case COND_FLOAT_MOV(MOVF
, 7):
13749 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13750 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13753 MIPS_INVAL("pool32fxf");
13754 generate_exception_end(ctx
, EXCP_RI
);
13759 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13763 int rt
, rs
, rd
, rr
;
13765 uint32_t op
, minor
, mips32_op
;
13766 uint32_t cond
, fmt
, cc
;
13768 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13769 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13771 rt
= (ctx
->opcode
>> 21) & 0x1f;
13772 rs
= (ctx
->opcode
>> 16) & 0x1f;
13773 rd
= (ctx
->opcode
>> 11) & 0x1f;
13774 rr
= (ctx
->opcode
>> 6) & 0x1f;
13775 imm
= (int16_t) ctx
->opcode
;
13777 op
= (ctx
->opcode
>> 26) & 0x3f;
13780 minor
= ctx
->opcode
& 0x3f;
13783 minor
= (ctx
->opcode
>> 6) & 0xf;
13786 mips32_op
= OPC_SLL
;
13789 mips32_op
= OPC_SRA
;
13792 mips32_op
= OPC_SRL
;
13795 mips32_op
= OPC_ROTR
;
13797 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13800 check_insn(ctx
, ISA_MIPS32R6
);
13801 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13804 check_insn(ctx
, ISA_MIPS32R6
);
13805 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13808 check_insn(ctx
, ISA_MIPS32R6
);
13809 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13812 goto pool32a_invalid
;
13816 minor
= (ctx
->opcode
>> 6) & 0xf;
13820 mips32_op
= OPC_ADD
;
13823 mips32_op
= OPC_ADDU
;
13826 mips32_op
= OPC_SUB
;
13829 mips32_op
= OPC_SUBU
;
13832 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13833 mips32_op
= OPC_MUL
;
13835 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13839 mips32_op
= OPC_SLLV
;
13842 mips32_op
= OPC_SRLV
;
13845 mips32_op
= OPC_SRAV
;
13848 mips32_op
= OPC_ROTRV
;
13850 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13852 /* Logical operations */
13854 mips32_op
= OPC_AND
;
13857 mips32_op
= OPC_OR
;
13860 mips32_op
= OPC_NOR
;
13863 mips32_op
= OPC_XOR
;
13865 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13867 /* Set less than */
13869 mips32_op
= OPC_SLT
;
13872 mips32_op
= OPC_SLTU
;
13874 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13877 goto pool32a_invalid
;
13881 minor
= (ctx
->opcode
>> 6) & 0xf;
13883 /* Conditional moves */
13884 case MOVN
: /* MUL */
13885 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13887 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13890 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13893 case MOVZ
: /* MUH */
13894 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13896 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13899 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13903 check_insn(ctx
, ISA_MIPS32R6
);
13904 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13907 check_insn(ctx
, ISA_MIPS32R6
);
13908 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13910 case LWXS
: /* DIV */
13911 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13913 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13916 gen_ldxs(ctx
, rs
, rt
, rd
);
13920 check_insn(ctx
, ISA_MIPS32R6
);
13921 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13924 check_insn(ctx
, ISA_MIPS32R6
);
13925 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13928 check_insn(ctx
, ISA_MIPS32R6
);
13929 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13932 goto pool32a_invalid
;
13936 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13939 check_insn(ctx
, ISA_MIPS32R6
);
13940 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13941 extract32(ctx
->opcode
, 9, 2));
13944 check_insn(ctx
, ISA_MIPS32R6
);
13945 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13946 extract32(ctx
->opcode
, 9, 2));
13949 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13952 gen_pool32axf(env
, ctx
, rt
, rs
);
13955 generate_exception_end(ctx
, EXCP_BREAK
);
13958 check_insn(ctx
, ISA_MIPS32R6
);
13959 generate_exception_end(ctx
, EXCP_RI
);
13963 MIPS_INVAL("pool32a");
13964 generate_exception_end(ctx
, EXCP_RI
);
13969 minor
= (ctx
->opcode
>> 12) & 0xf;
13972 check_cp0_enabled(ctx
);
13973 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
13974 gen_cache_operation(ctx
, rt
, rs
, imm
);
13979 /* COP2: Not implemented. */
13980 generate_exception_err(ctx
, EXCP_CpU
, 2);
13982 #ifdef TARGET_MIPS64
13985 check_insn(ctx
, ISA_MIPS3
);
13986 check_mips_64(ctx
);
13991 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13993 #ifdef TARGET_MIPS64
13996 check_insn(ctx
, ISA_MIPS3
);
13997 check_mips_64(ctx
);
14002 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14005 MIPS_INVAL("pool32b");
14006 generate_exception_end(ctx
, EXCP_RI
);
14011 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
14012 minor
= ctx
->opcode
& 0x3f;
14013 check_cp1_enabled(ctx
);
14016 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14017 mips32_op
= OPC_ALNV_PS
;
14020 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14021 mips32_op
= OPC_MADD_S
;
14024 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14025 mips32_op
= OPC_MADD_D
;
14028 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14029 mips32_op
= OPC_MADD_PS
;
14032 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14033 mips32_op
= OPC_MSUB_S
;
14036 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14037 mips32_op
= OPC_MSUB_D
;
14040 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14041 mips32_op
= OPC_MSUB_PS
;
14044 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14045 mips32_op
= OPC_NMADD_S
;
14048 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14049 mips32_op
= OPC_NMADD_D
;
14052 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14053 mips32_op
= OPC_NMADD_PS
;
14056 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14057 mips32_op
= OPC_NMSUB_S
;
14060 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14061 mips32_op
= OPC_NMSUB_D
;
14064 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14065 mips32_op
= OPC_NMSUB_PS
;
14067 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
14069 case CABS_COND_FMT
:
14070 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14071 cond
= (ctx
->opcode
>> 6) & 0xf;
14072 cc
= (ctx
->opcode
>> 13) & 0x7;
14073 fmt
= (ctx
->opcode
>> 10) & 0x3;
14076 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
14079 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
14082 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
14085 goto pool32f_invalid
;
14089 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14090 cond
= (ctx
->opcode
>> 6) & 0xf;
14091 cc
= (ctx
->opcode
>> 13) & 0x7;
14092 fmt
= (ctx
->opcode
>> 10) & 0x3;
14095 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
14098 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
14101 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
14104 goto pool32f_invalid
;
14108 check_insn(ctx
, ISA_MIPS32R6
);
14109 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14112 check_insn(ctx
, ISA_MIPS32R6
);
14113 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
14116 gen_pool32fxf(ctx
, rt
, rs
);
14120 switch ((ctx
->opcode
>> 6) & 0x7) {
14122 mips32_op
= OPC_PLL_PS
;
14125 mips32_op
= OPC_PLU_PS
;
14128 mips32_op
= OPC_PUL_PS
;
14131 mips32_op
= OPC_PUU_PS
;
14134 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14135 mips32_op
= OPC_CVT_PS_S
;
14137 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14140 goto pool32f_invalid
;
14144 check_insn(ctx
, ISA_MIPS32R6
);
14145 switch ((ctx
->opcode
>> 9) & 0x3) {
14147 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
14150 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
14153 goto pool32f_invalid
;
14158 switch ((ctx
->opcode
>> 6) & 0x7) {
14160 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14161 mips32_op
= OPC_LWXC1
;
14164 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14165 mips32_op
= OPC_SWXC1
;
14168 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14169 mips32_op
= OPC_LDXC1
;
14172 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14173 mips32_op
= OPC_SDXC1
;
14176 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14177 mips32_op
= OPC_LUXC1
;
14180 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14181 mips32_op
= OPC_SUXC1
;
14183 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
14186 goto pool32f_invalid
;
14190 check_insn(ctx
, ISA_MIPS32R6
);
14191 switch ((ctx
->opcode
>> 9) & 0x3) {
14193 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
14196 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
14199 goto pool32f_invalid
;
14204 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14205 fmt
= (ctx
->opcode
>> 9) & 0x3;
14206 switch ((ctx
->opcode
>> 6) & 0x7) {
14210 mips32_op
= OPC_RSQRT2_S
;
14213 mips32_op
= OPC_RSQRT2_D
;
14216 mips32_op
= OPC_RSQRT2_PS
;
14219 goto pool32f_invalid
;
14225 mips32_op
= OPC_RECIP2_S
;
14228 mips32_op
= OPC_RECIP2_D
;
14231 mips32_op
= OPC_RECIP2_PS
;
14234 goto pool32f_invalid
;
14238 mips32_op
= OPC_ADDR_PS
;
14241 mips32_op
= OPC_MULR_PS
;
14243 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14246 goto pool32f_invalid
;
14250 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14251 cc
= (ctx
->opcode
>> 13) & 0x7;
14252 fmt
= (ctx
->opcode
>> 9) & 0x3;
14253 switch ((ctx
->opcode
>> 6) & 0x7) {
14254 case MOVF_FMT
: /* RINT_FMT */
14255 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14259 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14262 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14265 goto pool32f_invalid
;
14271 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14274 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14278 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14281 goto pool32f_invalid
;
14285 case MOVT_FMT
: /* CLASS_FMT */
14286 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14290 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14293 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14296 goto pool32f_invalid
;
14302 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14305 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14309 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14312 goto pool32f_invalid
;
14317 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14320 goto pool32f_invalid
;
14323 #define FINSN_3ARG_SDPS(prfx) \
14324 switch ((ctx->opcode >> 8) & 0x3) { \
14326 mips32_op = OPC_##prfx##_S; \
14329 mips32_op = OPC_##prfx##_D; \
14331 case FMT_SDPS_PS: \
14333 mips32_op = OPC_##prfx##_PS; \
14336 goto pool32f_invalid; \
14339 check_insn(ctx
, ISA_MIPS32R6
);
14340 switch ((ctx
->opcode
>> 9) & 0x3) {
14342 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14345 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14348 goto pool32f_invalid
;
14352 check_insn(ctx
, ISA_MIPS32R6
);
14353 switch ((ctx
->opcode
>> 9) & 0x3) {
14355 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14358 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14361 goto pool32f_invalid
;
14365 /* regular FP ops */
14366 switch ((ctx
->opcode
>> 6) & 0x3) {
14368 FINSN_3ARG_SDPS(ADD
);
14371 FINSN_3ARG_SDPS(SUB
);
14374 FINSN_3ARG_SDPS(MUL
);
14377 fmt
= (ctx
->opcode
>> 8) & 0x3;
14379 mips32_op
= OPC_DIV_D
;
14380 } else if (fmt
== 0) {
14381 mips32_op
= OPC_DIV_S
;
14383 goto pool32f_invalid
;
14387 goto pool32f_invalid
;
14392 switch ((ctx
->opcode
>> 6) & 0x7) {
14393 case MOVN_FMT
: /* SELNEZ_FMT */
14394 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14396 switch ((ctx
->opcode
>> 9) & 0x3) {
14398 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14401 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14404 goto pool32f_invalid
;
14408 FINSN_3ARG_SDPS(MOVN
);
14412 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14413 FINSN_3ARG_SDPS(MOVN
);
14415 case MOVZ_FMT
: /* SELEQZ_FMT */
14416 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14418 switch ((ctx
->opcode
>> 9) & 0x3) {
14420 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14423 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14426 goto pool32f_invalid
;
14430 FINSN_3ARG_SDPS(MOVZ
);
14434 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14435 FINSN_3ARG_SDPS(MOVZ
);
14438 check_insn(ctx
, ISA_MIPS32R6
);
14439 switch ((ctx
->opcode
>> 9) & 0x3) {
14441 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14444 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14447 goto pool32f_invalid
;
14451 check_insn(ctx
, ISA_MIPS32R6
);
14452 switch ((ctx
->opcode
>> 9) & 0x3) {
14454 mips32_op
= OPC_MADDF_S
;
14457 mips32_op
= OPC_MADDF_D
;
14460 goto pool32f_invalid
;
14464 check_insn(ctx
, ISA_MIPS32R6
);
14465 switch ((ctx
->opcode
>> 9) & 0x3) {
14467 mips32_op
= OPC_MSUBF_S
;
14470 mips32_op
= OPC_MSUBF_D
;
14473 goto pool32f_invalid
;
14477 goto pool32f_invalid
;
14481 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14485 MIPS_INVAL("pool32f");
14486 generate_exception_end(ctx
, EXCP_RI
);
14490 generate_exception_err(ctx
, EXCP_CpU
, 1);
14494 minor
= (ctx
->opcode
>> 21) & 0x1f;
14497 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14498 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14501 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14502 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14503 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14506 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14507 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14508 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14511 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14512 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14515 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14516 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14517 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14520 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14521 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14522 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14525 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14526 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14529 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14530 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14534 case TLTI
: /* BC1EQZC */
14535 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14537 check_cp1_enabled(ctx
);
14538 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14541 mips32_op
= OPC_TLTI
;
14545 case TGEI
: /* BC1NEZC */
14546 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14548 check_cp1_enabled(ctx
);
14549 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14552 mips32_op
= OPC_TGEI
;
14557 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14558 mips32_op
= OPC_TLTIU
;
14561 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14562 mips32_op
= OPC_TGEIU
;
14564 case TNEI
: /* SYNCI */
14565 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14567 /* Break the TB to be able to sync copied instructions
14569 ctx
->bstate
= BS_STOP
;
14572 mips32_op
= OPC_TNEI
;
14577 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14578 mips32_op
= OPC_TEQI
;
14580 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14585 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14586 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14587 4, rs
, 0, imm
<< 1, 0);
14588 /* Compact branches don't have a delay slot, so just let
14589 the normal delay slot handling take us to the branch
14593 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14594 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14597 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14598 /* Break the TB to be able to sync copied instructions
14600 ctx
->bstate
= BS_STOP
;
14604 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14605 /* COP2: Not implemented. */
14606 generate_exception_err(ctx
, EXCP_CpU
, 2);
14609 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14610 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14613 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14614 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14617 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14618 mips32_op
= OPC_BC1FANY4
;
14621 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14622 mips32_op
= OPC_BC1TANY4
;
14625 check_insn(ctx
, ASE_MIPS3D
);
14628 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14629 check_cp1_enabled(ctx
);
14630 gen_compute_branch1(ctx
, mips32_op
,
14631 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14633 generate_exception_err(ctx
, EXCP_CpU
, 1);
14638 /* MIPS DSP: not implemented */
14641 MIPS_INVAL("pool32i");
14642 generate_exception_end(ctx
, EXCP_RI
);
14647 minor
= (ctx
->opcode
>> 12) & 0xf;
14648 offset
= sextract32(ctx
->opcode
, 0,
14649 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14652 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14653 mips32_op
= OPC_LWL
;
14656 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14657 mips32_op
= OPC_SWL
;
14660 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14661 mips32_op
= OPC_LWR
;
14664 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14665 mips32_op
= OPC_SWR
;
14667 #if defined(TARGET_MIPS64)
14669 check_insn(ctx
, ISA_MIPS3
);
14670 check_mips_64(ctx
);
14671 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14672 mips32_op
= OPC_LDL
;
14675 check_insn(ctx
, ISA_MIPS3
);
14676 check_mips_64(ctx
);
14677 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14678 mips32_op
= OPC_SDL
;
14681 check_insn(ctx
, ISA_MIPS3
);
14682 check_mips_64(ctx
);
14683 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14684 mips32_op
= OPC_LDR
;
14687 check_insn(ctx
, ISA_MIPS3
);
14688 check_mips_64(ctx
);
14689 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14690 mips32_op
= OPC_SDR
;
14693 check_insn(ctx
, ISA_MIPS3
);
14694 check_mips_64(ctx
);
14695 mips32_op
= OPC_LWU
;
14698 check_insn(ctx
, ISA_MIPS3
);
14699 check_mips_64(ctx
);
14700 mips32_op
= OPC_LLD
;
14704 mips32_op
= OPC_LL
;
14707 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14710 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14713 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14715 #if defined(TARGET_MIPS64)
14717 check_insn(ctx
, ISA_MIPS3
);
14718 check_mips_64(ctx
);
14719 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14723 /* Treat as no-op */
14724 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14725 /* hint codes 24-31 are reserved and signal RI */
14726 generate_exception(ctx
, EXCP_RI
);
14730 MIPS_INVAL("pool32c");
14731 generate_exception_end(ctx
, EXCP_RI
);
14735 case ADDI32
: /* AUI, LUI */
14736 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14738 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14741 mips32_op
= OPC_ADDI
;
14746 mips32_op
= OPC_ADDIU
;
14748 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14751 /* Logical operations */
14753 mips32_op
= OPC_ORI
;
14756 mips32_op
= OPC_XORI
;
14759 mips32_op
= OPC_ANDI
;
14761 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14764 /* Set less than immediate */
14766 mips32_op
= OPC_SLTI
;
14769 mips32_op
= OPC_SLTIU
;
14771 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14774 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14775 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14776 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14777 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14779 case JALS32
: /* BOVC, BEQC, BEQZALC */
14780 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14783 mips32_op
= OPC_BOVC
;
14784 } else if (rs
< rt
&& rs
== 0) {
14786 mips32_op
= OPC_BEQZALC
;
14789 mips32_op
= OPC_BEQC
;
14791 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14794 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14795 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14796 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14799 case BEQ32
: /* BC */
14800 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14802 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14803 sextract32(ctx
->opcode
<< 1, 0, 27));
14806 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14809 case BNE32
: /* BALC */
14810 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14812 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14813 sextract32(ctx
->opcode
<< 1, 0, 27));
14816 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14819 case J32
: /* BGTZC, BLTZC, BLTC */
14820 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14821 if (rs
== 0 && rt
!= 0) {
14823 mips32_op
= OPC_BGTZC
;
14824 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14826 mips32_op
= OPC_BLTZC
;
14829 mips32_op
= OPC_BLTC
;
14831 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14834 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14835 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14838 case JAL32
: /* BLEZC, BGEZC, BGEC */
14839 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14840 if (rs
== 0 && rt
!= 0) {
14842 mips32_op
= OPC_BLEZC
;
14843 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14845 mips32_op
= OPC_BGEZC
;
14848 mips32_op
= OPC_BGEC
;
14850 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14853 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14854 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14855 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14858 /* Floating point (COP1) */
14860 mips32_op
= OPC_LWC1
;
14863 mips32_op
= OPC_LDC1
;
14866 mips32_op
= OPC_SWC1
;
14869 mips32_op
= OPC_SDC1
;
14871 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14873 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14874 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14875 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14876 switch ((ctx
->opcode
>> 16) & 0x1f) {
14877 case ADDIUPC_00
... ADDIUPC_07
:
14878 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14881 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14884 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14886 case LWPC_08
... LWPC_0F
:
14887 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14890 generate_exception(ctx
, EXCP_RI
);
14895 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14896 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14898 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14901 case BNVC
: /* BNEC, BNEZALC */
14902 check_insn(ctx
, ISA_MIPS32R6
);
14905 mips32_op
= OPC_BNVC
;
14906 } else if (rs
< rt
&& rs
== 0) {
14908 mips32_op
= OPC_BNEZALC
;
14911 mips32_op
= OPC_BNEC
;
14913 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14915 case R6_BNEZC
: /* JIALC */
14916 check_insn(ctx
, ISA_MIPS32R6
);
14919 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14920 sextract32(ctx
->opcode
<< 1, 0, 22));
14923 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14926 case R6_BEQZC
: /* JIC */
14927 check_insn(ctx
, ISA_MIPS32R6
);
14930 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14931 sextract32(ctx
->opcode
<< 1, 0, 22));
14934 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14937 case BLEZALC
: /* BGEZALC, BGEUC */
14938 check_insn(ctx
, ISA_MIPS32R6
);
14939 if (rs
== 0 && rt
!= 0) {
14941 mips32_op
= OPC_BLEZALC
;
14942 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14944 mips32_op
= OPC_BGEZALC
;
14947 mips32_op
= OPC_BGEUC
;
14949 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14951 case BGTZALC
: /* BLTZALC, BLTUC */
14952 check_insn(ctx
, ISA_MIPS32R6
);
14953 if (rs
== 0 && rt
!= 0) {
14955 mips32_op
= OPC_BGTZALC
;
14956 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14958 mips32_op
= OPC_BLTZALC
;
14961 mips32_op
= OPC_BLTUC
;
14963 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14965 /* Loads and stores */
14967 mips32_op
= OPC_LB
;
14970 mips32_op
= OPC_LBU
;
14973 mips32_op
= OPC_LH
;
14976 mips32_op
= OPC_LHU
;
14979 mips32_op
= OPC_LW
;
14981 #ifdef TARGET_MIPS64
14983 check_insn(ctx
, ISA_MIPS3
);
14984 check_mips_64(ctx
);
14985 mips32_op
= OPC_LD
;
14988 check_insn(ctx
, ISA_MIPS3
);
14989 check_mips_64(ctx
);
14990 mips32_op
= OPC_SD
;
14994 mips32_op
= OPC_SB
;
14997 mips32_op
= OPC_SH
;
15000 mips32_op
= OPC_SW
;
15003 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
15006 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
15009 generate_exception_end(ctx
, EXCP_RI
);
15014 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
15018 /* make sure instructions are on a halfword boundary */
15019 if (ctx
->pc
& 0x1) {
15020 env
->CP0_BadVAddr
= ctx
->pc
;
15021 generate_exception_end(ctx
, EXCP_AdEL
);
15025 op
= (ctx
->opcode
>> 10) & 0x3f;
15026 /* Enforce properly-sized instructions in a delay slot */
15027 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
15028 switch (op
& 0x7) { /* MSB-3..MSB-5 */
15030 /* POOL32A, POOL32B, POOL32I, POOL32C */
15032 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15034 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15036 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15038 /* LB32, LH32, LWC132, LDC132, LW32 */
15039 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
15040 generate_exception_end(ctx
, EXCP_RI
);
15045 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15047 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15049 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15050 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
15051 generate_exception_end(ctx
, EXCP_RI
);
15061 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15062 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15063 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15066 switch (ctx
->opcode
& 0x1) {
15074 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15075 /* In the Release 6 the register number location in
15076 * the instruction encoding has changed.
15078 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15080 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15086 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15087 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15088 int amount
= (ctx
->opcode
>> 1) & 0x7;
15090 amount
= amount
== 0 ? 8 : amount
;
15092 switch (ctx
->opcode
& 0x1) {
15101 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15105 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15106 gen_pool16c_r6_insn(ctx
);
15108 gen_pool16c_insn(ctx
);
15113 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15114 int rb
= 28; /* GP */
15115 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15117 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15121 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15122 if (ctx
->opcode
& 1) {
15123 generate_exception_end(ctx
, EXCP_RI
);
15126 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15127 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15128 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15129 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15134 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15135 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15136 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15137 offset
= (offset
== 0xf ? -1 : offset
);
15139 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15144 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15145 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15146 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15148 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15153 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15154 int rb
= 29; /* SP */
15155 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15157 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15162 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15163 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15164 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15166 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15171 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15172 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15173 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15175 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15180 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15181 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15182 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15184 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15189 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15190 int rb
= 29; /* SP */
15191 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15193 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15198 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15199 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15200 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15202 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15207 int rd
= uMIPS_RD5(ctx
->opcode
);
15208 int rs
= uMIPS_RS5(ctx
->opcode
);
15210 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15217 switch (ctx
->opcode
& 0x1) {
15227 switch (ctx
->opcode
& 0x1) {
15232 gen_addiur1sp(ctx
);
15236 case B16
: /* BC16 */
15237 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15238 sextract32(ctx
->opcode
, 0, 10) << 1,
15239 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15241 case BNEZ16
: /* BNEZC16 */
15242 case BEQZ16
: /* BEQZC16 */
15243 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15244 mmreg(uMIPS_RD(ctx
->opcode
)),
15245 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15246 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15251 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15252 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15254 imm
= (imm
== 0x7f ? -1 : imm
);
15255 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15261 generate_exception_end(ctx
, EXCP_RI
);
15264 decode_micromips32_opc(env
, ctx
);
15271 /* SmartMIPS extension to MIPS32 */
15273 #if defined(TARGET_MIPS64)
15275 /* MDMX extension to MIPS64 */
15279 /* MIPSDSP functions. */
15280 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
15281 int rd
, int base
, int offset
)
15286 t0
= tcg_temp_new();
15289 gen_load_gpr(t0
, offset
);
15290 } else if (offset
== 0) {
15291 gen_load_gpr(t0
, base
);
15293 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
15298 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15299 gen_store_gpr(t0
, rd
);
15302 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15303 gen_store_gpr(t0
, rd
);
15306 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15307 gen_store_gpr(t0
, rd
);
15309 #if defined(TARGET_MIPS64)
15311 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15312 gen_store_gpr(t0
, rd
);
15319 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15320 int ret
, int v1
, int v2
)
15326 /* Treat as NOP. */
15330 v1_t
= tcg_temp_new();
15331 v2_t
= tcg_temp_new();
15333 gen_load_gpr(v1_t
, v1
);
15334 gen_load_gpr(v2_t
, v2
);
15337 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15338 case OPC_MULT_G_2E
:
15342 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15344 case OPC_ADDUH_R_QB
:
15345 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15348 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15350 case OPC_ADDQH_R_PH
:
15351 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15354 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15356 case OPC_ADDQH_R_W
:
15357 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15360 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15362 case OPC_SUBUH_R_QB
:
15363 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15366 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15368 case OPC_SUBQH_R_PH
:
15369 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15372 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15374 case OPC_SUBQH_R_W
:
15375 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15379 case OPC_ABSQ_S_PH_DSP
:
15381 case OPC_ABSQ_S_QB
:
15383 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15385 case OPC_ABSQ_S_PH
:
15387 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15391 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15393 case OPC_PRECEQ_W_PHL
:
15395 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15396 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15398 case OPC_PRECEQ_W_PHR
:
15400 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15401 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15402 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15404 case OPC_PRECEQU_PH_QBL
:
15406 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15408 case OPC_PRECEQU_PH_QBR
:
15410 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15412 case OPC_PRECEQU_PH_QBLA
:
15414 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15416 case OPC_PRECEQU_PH_QBRA
:
15418 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15420 case OPC_PRECEU_PH_QBL
:
15422 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15424 case OPC_PRECEU_PH_QBR
:
15426 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15428 case OPC_PRECEU_PH_QBLA
:
15430 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15432 case OPC_PRECEU_PH_QBRA
:
15434 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15438 case OPC_ADDU_QB_DSP
:
15442 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15444 case OPC_ADDQ_S_PH
:
15446 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15450 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15454 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15456 case OPC_ADDU_S_QB
:
15458 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15462 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15464 case OPC_ADDU_S_PH
:
15466 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15470 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15472 case OPC_SUBQ_S_PH
:
15474 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15478 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15482 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15484 case OPC_SUBU_S_QB
:
15486 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15490 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15492 case OPC_SUBU_S_PH
:
15494 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15498 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15502 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15506 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15508 case OPC_RADDU_W_QB
:
15510 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15514 case OPC_CMPU_EQ_QB_DSP
:
15516 case OPC_PRECR_QB_PH
:
15518 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15520 case OPC_PRECRQ_QB_PH
:
15522 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15524 case OPC_PRECR_SRA_PH_W
:
15527 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15528 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15530 tcg_temp_free_i32(sa_t
);
15533 case OPC_PRECR_SRA_R_PH_W
:
15536 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15537 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15539 tcg_temp_free_i32(sa_t
);
15542 case OPC_PRECRQ_PH_W
:
15544 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15546 case OPC_PRECRQ_RS_PH_W
:
15548 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15550 case OPC_PRECRQU_S_QB_PH
:
15552 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15556 #ifdef TARGET_MIPS64
15557 case OPC_ABSQ_S_QH_DSP
:
15559 case OPC_PRECEQ_L_PWL
:
15561 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15563 case OPC_PRECEQ_L_PWR
:
15565 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15567 case OPC_PRECEQ_PW_QHL
:
15569 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15571 case OPC_PRECEQ_PW_QHR
:
15573 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15575 case OPC_PRECEQ_PW_QHLA
:
15577 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15579 case OPC_PRECEQ_PW_QHRA
:
15581 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15583 case OPC_PRECEQU_QH_OBL
:
15585 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15587 case OPC_PRECEQU_QH_OBR
:
15589 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15591 case OPC_PRECEQU_QH_OBLA
:
15593 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15595 case OPC_PRECEQU_QH_OBRA
:
15597 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15599 case OPC_PRECEU_QH_OBL
:
15601 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15603 case OPC_PRECEU_QH_OBR
:
15605 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15607 case OPC_PRECEU_QH_OBLA
:
15609 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15611 case OPC_PRECEU_QH_OBRA
:
15613 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15615 case OPC_ABSQ_S_OB
:
15617 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15619 case OPC_ABSQ_S_PW
:
15621 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15623 case OPC_ABSQ_S_QH
:
15625 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15629 case OPC_ADDU_OB_DSP
:
15631 case OPC_RADDU_L_OB
:
15633 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15637 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15639 case OPC_SUBQ_S_PW
:
15641 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15645 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15647 case OPC_SUBQ_S_QH
:
15649 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15653 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15655 case OPC_SUBU_S_OB
:
15657 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15661 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15663 case OPC_SUBU_S_QH
:
15665 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15669 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15671 case OPC_SUBUH_R_OB
:
15673 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15677 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15679 case OPC_ADDQ_S_PW
:
15681 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15685 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15687 case OPC_ADDQ_S_QH
:
15689 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15693 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15695 case OPC_ADDU_S_OB
:
15697 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15701 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15703 case OPC_ADDU_S_QH
:
15705 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15709 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15711 case OPC_ADDUH_R_OB
:
15713 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15717 case OPC_CMPU_EQ_OB_DSP
:
15719 case OPC_PRECR_OB_QH
:
15721 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15723 case OPC_PRECR_SRA_QH_PW
:
15726 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15727 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15728 tcg_temp_free_i32(ret_t
);
15731 case OPC_PRECR_SRA_R_QH_PW
:
15734 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15735 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15736 tcg_temp_free_i32(sa_v
);
15739 case OPC_PRECRQ_OB_QH
:
15741 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15743 case OPC_PRECRQ_PW_L
:
15745 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15747 case OPC_PRECRQ_QH_PW
:
15749 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15751 case OPC_PRECRQ_RS_QH_PW
:
15753 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15755 case OPC_PRECRQU_S_OB_QH
:
15757 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15764 tcg_temp_free(v1_t
);
15765 tcg_temp_free(v2_t
);
15768 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15769 int ret
, int v1
, int v2
)
15777 /* Treat as NOP. */
15781 t0
= tcg_temp_new();
15782 v1_t
= tcg_temp_new();
15783 v2_t
= tcg_temp_new();
15785 tcg_gen_movi_tl(t0
, v1
);
15786 gen_load_gpr(v1_t
, v1
);
15787 gen_load_gpr(v2_t
, v2
);
15790 case OPC_SHLL_QB_DSP
:
15792 op2
= MASK_SHLL_QB(ctx
->opcode
);
15796 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15800 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15804 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15808 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15810 case OPC_SHLL_S_PH
:
15812 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15814 case OPC_SHLLV_S_PH
:
15816 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15820 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15822 case OPC_SHLLV_S_W
:
15824 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15828 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15832 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15836 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15840 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15844 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15846 case OPC_SHRA_R_QB
:
15848 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15852 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15854 case OPC_SHRAV_R_QB
:
15856 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15860 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15862 case OPC_SHRA_R_PH
:
15864 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15868 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15870 case OPC_SHRAV_R_PH
:
15872 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15876 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15878 case OPC_SHRAV_R_W
:
15880 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15882 default: /* Invalid */
15883 MIPS_INVAL("MASK SHLL.QB");
15884 generate_exception_end(ctx
, EXCP_RI
);
15889 #ifdef TARGET_MIPS64
15890 case OPC_SHLL_OB_DSP
:
15891 op2
= MASK_SHLL_OB(ctx
->opcode
);
15895 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15899 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15901 case OPC_SHLL_S_PW
:
15903 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15905 case OPC_SHLLV_S_PW
:
15907 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15911 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15915 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15919 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15923 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15925 case OPC_SHLL_S_QH
:
15927 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15929 case OPC_SHLLV_S_QH
:
15931 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15935 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15939 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15941 case OPC_SHRA_R_OB
:
15943 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15945 case OPC_SHRAV_R_OB
:
15947 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15951 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15955 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15957 case OPC_SHRA_R_PW
:
15959 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15961 case OPC_SHRAV_R_PW
:
15963 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15967 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15971 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15973 case OPC_SHRA_R_QH
:
15975 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15977 case OPC_SHRAV_R_QH
:
15979 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15983 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15987 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15991 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15995 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15997 default: /* Invalid */
15998 MIPS_INVAL("MASK SHLL.OB");
15999 generate_exception_end(ctx
, EXCP_RI
);
16007 tcg_temp_free(v1_t
);
16008 tcg_temp_free(v2_t
);
16011 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16012 int ret
, int v1
, int v2
, int check_ret
)
16018 if ((ret
== 0) && (check_ret
== 1)) {
16019 /* Treat as NOP. */
16023 t0
= tcg_temp_new_i32();
16024 v1_t
= tcg_temp_new();
16025 v2_t
= tcg_temp_new();
16027 tcg_gen_movi_i32(t0
, ret
);
16028 gen_load_gpr(v1_t
, v1
);
16029 gen_load_gpr(v2_t
, v2
);
16032 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
16033 * the same mask and op1. */
16034 case OPC_MULT_G_2E
:
16038 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16041 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16044 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16046 case OPC_MULQ_RS_W
:
16047 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16051 case OPC_DPA_W_PH_DSP
:
16053 case OPC_DPAU_H_QBL
:
16055 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16057 case OPC_DPAU_H_QBR
:
16059 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16061 case OPC_DPSU_H_QBL
:
16063 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16065 case OPC_DPSU_H_QBR
:
16067 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16071 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16073 case OPC_DPAX_W_PH
:
16075 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16077 case OPC_DPAQ_S_W_PH
:
16079 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16081 case OPC_DPAQX_S_W_PH
:
16083 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16085 case OPC_DPAQX_SA_W_PH
:
16087 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16091 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16093 case OPC_DPSX_W_PH
:
16095 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16097 case OPC_DPSQ_S_W_PH
:
16099 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16101 case OPC_DPSQX_S_W_PH
:
16103 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16105 case OPC_DPSQX_SA_W_PH
:
16107 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16109 case OPC_MULSAQ_S_W_PH
:
16111 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16113 case OPC_DPAQ_SA_L_W
:
16115 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16117 case OPC_DPSQ_SA_L_W
:
16119 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16121 case OPC_MAQ_S_W_PHL
:
16123 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16125 case OPC_MAQ_S_W_PHR
:
16127 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16129 case OPC_MAQ_SA_W_PHL
:
16131 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16133 case OPC_MAQ_SA_W_PHR
:
16135 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16137 case OPC_MULSA_W_PH
:
16139 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16143 #ifdef TARGET_MIPS64
16144 case OPC_DPAQ_W_QH_DSP
:
16146 int ac
= ret
& 0x03;
16147 tcg_gen_movi_i32(t0
, ac
);
16152 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16156 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16160 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16164 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16168 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16170 case OPC_DPAQ_S_W_QH
:
16172 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16174 case OPC_DPAQ_SA_L_PW
:
16176 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16178 case OPC_DPAU_H_OBL
:
16180 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16182 case OPC_DPAU_H_OBR
:
16184 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16188 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16190 case OPC_DPSQ_S_W_QH
:
16192 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16194 case OPC_DPSQ_SA_L_PW
:
16196 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16198 case OPC_DPSU_H_OBL
:
16200 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16202 case OPC_DPSU_H_OBR
:
16204 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16206 case OPC_MAQ_S_L_PWL
:
16208 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16210 case OPC_MAQ_S_L_PWR
:
16212 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16214 case OPC_MAQ_S_W_QHLL
:
16216 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16218 case OPC_MAQ_SA_W_QHLL
:
16220 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16222 case OPC_MAQ_S_W_QHLR
:
16224 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16226 case OPC_MAQ_SA_W_QHLR
:
16228 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16230 case OPC_MAQ_S_W_QHRL
:
16232 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16234 case OPC_MAQ_SA_W_QHRL
:
16236 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16238 case OPC_MAQ_S_W_QHRR
:
16240 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16242 case OPC_MAQ_SA_W_QHRR
:
16244 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16246 case OPC_MULSAQ_S_L_PW
:
16248 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16250 case OPC_MULSAQ_S_W_QH
:
16252 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16258 case OPC_ADDU_QB_DSP
:
16260 case OPC_MULEU_S_PH_QBL
:
16262 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16264 case OPC_MULEU_S_PH_QBR
:
16266 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16268 case OPC_MULQ_RS_PH
:
16270 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16272 case OPC_MULEQ_S_W_PHL
:
16274 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16276 case OPC_MULEQ_S_W_PHR
:
16278 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16280 case OPC_MULQ_S_PH
:
16282 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16286 #ifdef TARGET_MIPS64
16287 case OPC_ADDU_OB_DSP
:
16289 case OPC_MULEQ_S_PW_QHL
:
16291 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16293 case OPC_MULEQ_S_PW_QHR
:
16295 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16297 case OPC_MULEU_S_QH_OBL
:
16299 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16301 case OPC_MULEU_S_QH_OBR
:
16303 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16305 case OPC_MULQ_RS_QH
:
16307 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16314 tcg_temp_free_i32(t0
);
16315 tcg_temp_free(v1_t
);
16316 tcg_temp_free(v2_t
);
16319 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16327 /* Treat as NOP. */
16331 t0
= tcg_temp_new();
16332 val_t
= tcg_temp_new();
16333 gen_load_gpr(val_t
, val
);
16336 case OPC_ABSQ_S_PH_DSP
:
16340 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16345 target_long result
;
16346 imm
= (ctx
->opcode
>> 16) & 0xFF;
16347 result
= (uint32_t)imm
<< 24 |
16348 (uint32_t)imm
<< 16 |
16349 (uint32_t)imm
<< 8 |
16351 result
= (int32_t)result
;
16352 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16357 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16358 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16359 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16360 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16361 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16362 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16367 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16368 imm
= (int16_t)(imm
<< 6) >> 6;
16369 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16370 (target_long
)((int32_t)imm
<< 16 | \
16376 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16377 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16378 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16379 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16383 #ifdef TARGET_MIPS64
16384 case OPC_ABSQ_S_QH_DSP
:
16391 imm
= (ctx
->opcode
>> 16) & 0xFF;
16392 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16393 temp
= (temp
<< 16) | temp
;
16394 temp
= (temp
<< 32) | temp
;
16395 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16403 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16404 imm
= (int16_t)(imm
<< 6) >> 6;
16405 temp
= ((target_long
)imm
<< 32) \
16406 | ((target_long
)imm
& 0xFFFFFFFF);
16407 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16415 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16416 imm
= (int16_t)(imm
<< 6) >> 6;
16418 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16419 ((uint64_t)(uint16_t)imm
<< 32) |
16420 ((uint64_t)(uint16_t)imm
<< 16) |
16421 (uint64_t)(uint16_t)imm
;
16422 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16427 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16428 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16429 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16430 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16431 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16432 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16433 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16437 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16438 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16439 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16443 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16444 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16445 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16446 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16447 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16454 tcg_temp_free(val_t
);
16457 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16458 uint32_t op1
, uint32_t op2
,
16459 int ret
, int v1
, int v2
, int check_ret
)
16465 if ((ret
== 0) && (check_ret
== 1)) {
16466 /* Treat as NOP. */
16470 t1
= tcg_temp_new();
16471 v1_t
= tcg_temp_new();
16472 v2_t
= tcg_temp_new();
16474 gen_load_gpr(v1_t
, v1
);
16475 gen_load_gpr(v2_t
, v2
);
16478 case OPC_CMPU_EQ_QB_DSP
:
16480 case OPC_CMPU_EQ_QB
:
16482 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16484 case OPC_CMPU_LT_QB
:
16486 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16488 case OPC_CMPU_LE_QB
:
16490 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16492 case OPC_CMPGU_EQ_QB
:
16494 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16496 case OPC_CMPGU_LT_QB
:
16498 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16500 case OPC_CMPGU_LE_QB
:
16502 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16504 case OPC_CMPGDU_EQ_QB
:
16506 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16507 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16508 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16509 tcg_gen_shli_tl(t1
, t1
, 24);
16510 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16512 case OPC_CMPGDU_LT_QB
:
16514 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16515 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16516 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16517 tcg_gen_shli_tl(t1
, t1
, 24);
16518 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16520 case OPC_CMPGDU_LE_QB
:
16522 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16523 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16524 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16525 tcg_gen_shli_tl(t1
, t1
, 24);
16526 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16528 case OPC_CMP_EQ_PH
:
16530 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16532 case OPC_CMP_LT_PH
:
16534 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16536 case OPC_CMP_LE_PH
:
16538 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16542 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16546 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16548 case OPC_PACKRL_PH
:
16550 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16554 #ifdef TARGET_MIPS64
16555 case OPC_CMPU_EQ_OB_DSP
:
16557 case OPC_CMP_EQ_PW
:
16559 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16561 case OPC_CMP_LT_PW
:
16563 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16565 case OPC_CMP_LE_PW
:
16567 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16569 case OPC_CMP_EQ_QH
:
16571 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16573 case OPC_CMP_LT_QH
:
16575 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16577 case OPC_CMP_LE_QH
:
16579 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16581 case OPC_CMPGDU_EQ_OB
:
16583 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16585 case OPC_CMPGDU_LT_OB
:
16587 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16589 case OPC_CMPGDU_LE_OB
:
16591 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16593 case OPC_CMPGU_EQ_OB
:
16595 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16597 case OPC_CMPGU_LT_OB
:
16599 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16601 case OPC_CMPGU_LE_OB
:
16603 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16605 case OPC_CMPU_EQ_OB
:
16607 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16609 case OPC_CMPU_LT_OB
:
16611 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16613 case OPC_CMPU_LE_OB
:
16615 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16617 case OPC_PACKRL_PW
:
16619 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16623 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16627 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16631 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16639 tcg_temp_free(v1_t
);
16640 tcg_temp_free(v2_t
);
16643 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16644 uint32_t op1
, int rt
, int rs
, int sa
)
16651 /* Treat as NOP. */
16655 t0
= tcg_temp_new();
16656 gen_load_gpr(t0
, rs
);
16659 case OPC_APPEND_DSP
:
16660 switch (MASK_APPEND(ctx
->opcode
)) {
16663 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16665 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16669 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16670 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16671 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16672 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16674 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16678 if (sa
!= 0 && sa
!= 2) {
16679 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16680 tcg_gen_ext32u_tl(t0
, t0
);
16681 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16682 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16684 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16686 default: /* Invalid */
16687 MIPS_INVAL("MASK APPEND");
16688 generate_exception_end(ctx
, EXCP_RI
);
16692 #ifdef TARGET_MIPS64
16693 case OPC_DAPPEND_DSP
:
16694 switch (MASK_DAPPEND(ctx
->opcode
)) {
16697 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16701 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16702 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16703 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16707 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16708 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16709 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16714 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16715 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16716 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16717 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16720 default: /* Invalid */
16721 MIPS_INVAL("MASK DAPPEND");
16722 generate_exception_end(ctx
, EXCP_RI
);
16731 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16732 int ret
, int v1
, int v2
, int check_ret
)
16741 if ((ret
== 0) && (check_ret
== 1)) {
16742 /* Treat as NOP. */
16746 t0
= tcg_temp_new();
16747 t1
= tcg_temp_new();
16748 v1_t
= tcg_temp_new();
16749 v2_t
= tcg_temp_new();
16751 gen_load_gpr(v1_t
, v1
);
16752 gen_load_gpr(v2_t
, v2
);
16755 case OPC_EXTR_W_DSP
:
16759 tcg_gen_movi_tl(t0
, v2
);
16760 tcg_gen_movi_tl(t1
, v1
);
16761 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16764 tcg_gen_movi_tl(t0
, v2
);
16765 tcg_gen_movi_tl(t1
, v1
);
16766 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16768 case OPC_EXTR_RS_W
:
16769 tcg_gen_movi_tl(t0
, v2
);
16770 tcg_gen_movi_tl(t1
, v1
);
16771 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16774 tcg_gen_movi_tl(t0
, v2
);
16775 tcg_gen_movi_tl(t1
, v1
);
16776 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16778 case OPC_EXTRV_S_H
:
16779 tcg_gen_movi_tl(t0
, v2
);
16780 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16783 tcg_gen_movi_tl(t0
, v2
);
16784 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16786 case OPC_EXTRV_R_W
:
16787 tcg_gen_movi_tl(t0
, v2
);
16788 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16790 case OPC_EXTRV_RS_W
:
16791 tcg_gen_movi_tl(t0
, v2
);
16792 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16795 tcg_gen_movi_tl(t0
, v2
);
16796 tcg_gen_movi_tl(t1
, v1
);
16797 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16800 tcg_gen_movi_tl(t0
, v2
);
16801 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16804 tcg_gen_movi_tl(t0
, v2
);
16805 tcg_gen_movi_tl(t1
, v1
);
16806 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16809 tcg_gen_movi_tl(t0
, v2
);
16810 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16813 imm
= (ctx
->opcode
>> 20) & 0x3F;
16814 tcg_gen_movi_tl(t0
, ret
);
16815 tcg_gen_movi_tl(t1
, imm
);
16816 gen_helper_shilo(t0
, t1
, cpu_env
);
16819 tcg_gen_movi_tl(t0
, ret
);
16820 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16823 tcg_gen_movi_tl(t0
, ret
);
16824 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16827 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16828 tcg_gen_movi_tl(t0
, imm
);
16829 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16832 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16833 tcg_gen_movi_tl(t0
, imm
);
16834 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16838 #ifdef TARGET_MIPS64
16839 case OPC_DEXTR_W_DSP
:
16843 tcg_gen_movi_tl(t0
, ret
);
16844 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16848 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16849 int ac
= (ctx
->opcode
>> 11) & 0x03;
16850 tcg_gen_movi_tl(t0
, shift
);
16851 tcg_gen_movi_tl(t1
, ac
);
16852 gen_helper_dshilo(t0
, t1
, cpu_env
);
16857 int ac
= (ctx
->opcode
>> 11) & 0x03;
16858 tcg_gen_movi_tl(t0
, ac
);
16859 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16863 tcg_gen_movi_tl(t0
, v2
);
16864 tcg_gen_movi_tl(t1
, v1
);
16866 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16869 tcg_gen_movi_tl(t0
, v2
);
16870 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16873 tcg_gen_movi_tl(t0
, v2
);
16874 tcg_gen_movi_tl(t1
, v1
);
16875 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16878 tcg_gen_movi_tl(t0
, v2
);
16879 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16882 tcg_gen_movi_tl(t0
, v2
);
16883 tcg_gen_movi_tl(t1
, v1
);
16884 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16886 case OPC_DEXTR_R_L
:
16887 tcg_gen_movi_tl(t0
, v2
);
16888 tcg_gen_movi_tl(t1
, v1
);
16889 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16891 case OPC_DEXTR_RS_L
:
16892 tcg_gen_movi_tl(t0
, v2
);
16893 tcg_gen_movi_tl(t1
, v1
);
16894 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16897 tcg_gen_movi_tl(t0
, v2
);
16898 tcg_gen_movi_tl(t1
, v1
);
16899 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16901 case OPC_DEXTR_R_W
:
16902 tcg_gen_movi_tl(t0
, v2
);
16903 tcg_gen_movi_tl(t1
, v1
);
16904 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16906 case OPC_DEXTR_RS_W
:
16907 tcg_gen_movi_tl(t0
, v2
);
16908 tcg_gen_movi_tl(t1
, v1
);
16909 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16911 case OPC_DEXTR_S_H
:
16912 tcg_gen_movi_tl(t0
, v2
);
16913 tcg_gen_movi_tl(t1
, v1
);
16914 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16916 case OPC_DEXTRV_S_H
:
16917 tcg_gen_movi_tl(t0
, v2
);
16918 tcg_gen_movi_tl(t1
, v1
);
16919 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16922 tcg_gen_movi_tl(t0
, v2
);
16923 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16925 case OPC_DEXTRV_R_L
:
16926 tcg_gen_movi_tl(t0
, v2
);
16927 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16929 case OPC_DEXTRV_RS_L
:
16930 tcg_gen_movi_tl(t0
, v2
);
16931 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16934 tcg_gen_movi_tl(t0
, v2
);
16935 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16937 case OPC_DEXTRV_R_W
:
16938 tcg_gen_movi_tl(t0
, v2
);
16939 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16941 case OPC_DEXTRV_RS_W
:
16942 tcg_gen_movi_tl(t0
, v2
);
16943 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16952 tcg_temp_free(v1_t
);
16953 tcg_temp_free(v2_t
);
16956 /* End MIPSDSP functions. */
16958 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16960 int rs
, rt
, rd
, sa
;
16963 rs
= (ctx
->opcode
>> 21) & 0x1f;
16964 rt
= (ctx
->opcode
>> 16) & 0x1f;
16965 rd
= (ctx
->opcode
>> 11) & 0x1f;
16966 sa
= (ctx
->opcode
>> 6) & 0x1f;
16968 op1
= MASK_SPECIAL(ctx
->opcode
);
16971 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16973 case OPC_MULT
... OPC_DIVU
:
16974 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16984 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16987 MIPS_INVAL("special_r6 muldiv");
16988 generate_exception_end(ctx
, EXCP_RI
);
16994 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16998 if (rt
== 0 && sa
== 1) {
16999 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17000 We need additionally to check other fields */
17001 gen_cl(ctx
, op1
, rd
, rs
);
17003 generate_exception_end(ctx
, EXCP_RI
);
17007 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17008 gen_helper_do_semihosting(cpu_env
);
17010 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
17011 generate_exception_end(ctx
, EXCP_RI
);
17013 generate_exception_end(ctx
, EXCP_DBp
);
17017 #if defined(TARGET_MIPS64)
17019 check_mips_64(ctx
);
17020 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
17024 if (rt
== 0 && sa
== 1) {
17025 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
17026 We need additionally to check other fields */
17027 check_mips_64(ctx
);
17028 gen_cl(ctx
, op1
, rd
, rs
);
17030 generate_exception_end(ctx
, EXCP_RI
);
17033 case OPC_DMULT
... OPC_DDIVU
:
17034 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17044 check_mips_64(ctx
);
17045 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17048 MIPS_INVAL("special_r6 muldiv");
17049 generate_exception_end(ctx
, EXCP_RI
);
17054 default: /* Invalid */
17055 MIPS_INVAL("special_r6");
17056 generate_exception_end(ctx
, EXCP_RI
);
17061 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17063 int rs
, rt
, rd
, sa
;
17066 rs
= (ctx
->opcode
>> 21) & 0x1f;
17067 rt
= (ctx
->opcode
>> 16) & 0x1f;
17068 rd
= (ctx
->opcode
>> 11) & 0x1f;
17069 sa
= (ctx
->opcode
>> 6) & 0x1f;
17071 op1
= MASK_SPECIAL(ctx
->opcode
);
17073 case OPC_MOVN
: /* Conditional move */
17075 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17076 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17077 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17079 case OPC_MFHI
: /* Move from HI/LO */
17081 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17084 case OPC_MTLO
: /* Move to HI/LO */
17085 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17088 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17089 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17090 check_cp1_enabled(ctx
);
17091 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17092 (ctx
->opcode
>> 16) & 1);
17094 generate_exception_err(ctx
, EXCP_CpU
, 1);
17100 check_insn(ctx
, INSN_VR54XX
);
17101 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17102 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17104 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17109 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17111 #if defined(TARGET_MIPS64)
17112 case OPC_DMULT
... OPC_DDIVU
:
17113 check_insn(ctx
, ISA_MIPS3
);
17114 check_mips_64(ctx
);
17115 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17119 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17122 #ifdef MIPS_STRICT_STANDARD
17123 MIPS_INVAL("SPIM");
17124 generate_exception_end(ctx
, EXCP_RI
);
17126 /* Implemented as RI exception for now. */
17127 MIPS_INVAL("spim (unofficial)");
17128 generate_exception_end(ctx
, EXCP_RI
);
17131 default: /* Invalid */
17132 MIPS_INVAL("special_legacy");
17133 generate_exception_end(ctx
, EXCP_RI
);
17138 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17140 int rs
, rt
, rd
, sa
;
17143 rs
= (ctx
->opcode
>> 21) & 0x1f;
17144 rt
= (ctx
->opcode
>> 16) & 0x1f;
17145 rd
= (ctx
->opcode
>> 11) & 0x1f;
17146 sa
= (ctx
->opcode
>> 6) & 0x1f;
17148 op1
= MASK_SPECIAL(ctx
->opcode
);
17150 case OPC_SLL
: /* Shift with immediate */
17151 if (sa
== 5 && rd
== 0 &&
17152 rs
== 0 && rt
== 0) { /* PAUSE */
17153 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17154 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17155 generate_exception_end(ctx
, EXCP_RI
);
17161 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17164 switch ((ctx
->opcode
>> 21) & 0x1f) {
17166 /* rotr is decoded as srl on non-R2 CPUs */
17167 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17172 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17175 generate_exception_end(ctx
, EXCP_RI
);
17179 case OPC_ADD
... OPC_SUBU
:
17180 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17182 case OPC_SLLV
: /* Shifts */
17184 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17187 switch ((ctx
->opcode
>> 6) & 0x1f) {
17189 /* rotrv is decoded as srlv on non-R2 CPUs */
17190 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17195 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17198 generate_exception_end(ctx
, EXCP_RI
);
17202 case OPC_SLT
: /* Set on less than */
17204 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17206 case OPC_AND
: /* Logic*/
17210 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17213 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17215 case OPC_TGE
... OPC_TEQ
: /* Traps */
17217 check_insn(ctx
, ISA_MIPS2
);
17218 gen_trap(ctx
, op1
, rs
, rt
, -1);
17220 case OPC_LSA
: /* OPC_PMON */
17221 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17222 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17223 decode_opc_special_r6(env
, ctx
);
17225 /* Pmon entry point, also R4010 selsl */
17226 #ifdef MIPS_STRICT_STANDARD
17227 MIPS_INVAL("PMON / selsl");
17228 generate_exception_end(ctx
, EXCP_RI
);
17230 gen_helper_0e0i(pmon
, sa
);
17235 generate_exception_end(ctx
, EXCP_SYSCALL
);
17238 generate_exception_end(ctx
, EXCP_BREAK
);
17241 check_insn(ctx
, ISA_MIPS2
);
17242 gen_sync(extract32(ctx
->opcode
, 6, 5));
17245 #if defined(TARGET_MIPS64)
17246 /* MIPS64 specific opcodes */
17251 check_insn(ctx
, ISA_MIPS3
);
17252 check_mips_64(ctx
);
17253 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17256 switch ((ctx
->opcode
>> 21) & 0x1f) {
17258 /* drotr is decoded as dsrl on non-R2 CPUs */
17259 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17264 check_insn(ctx
, ISA_MIPS3
);
17265 check_mips_64(ctx
);
17266 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17269 generate_exception_end(ctx
, EXCP_RI
);
17274 switch ((ctx
->opcode
>> 21) & 0x1f) {
17276 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17277 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17282 check_insn(ctx
, ISA_MIPS3
);
17283 check_mips_64(ctx
);
17284 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17287 generate_exception_end(ctx
, EXCP_RI
);
17291 case OPC_DADD
... OPC_DSUBU
:
17292 check_insn(ctx
, ISA_MIPS3
);
17293 check_mips_64(ctx
);
17294 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17298 check_insn(ctx
, ISA_MIPS3
);
17299 check_mips_64(ctx
);
17300 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17303 switch ((ctx
->opcode
>> 6) & 0x1f) {
17305 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17306 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17311 check_insn(ctx
, ISA_MIPS3
);
17312 check_mips_64(ctx
);
17313 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17316 generate_exception_end(ctx
, EXCP_RI
);
17321 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17322 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17323 decode_opc_special_r6(env
, ctx
);
17328 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17329 decode_opc_special_r6(env
, ctx
);
17331 decode_opc_special_legacy(env
, ctx
);
17336 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17341 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17343 rs
= (ctx
->opcode
>> 21) & 0x1f;
17344 rt
= (ctx
->opcode
>> 16) & 0x1f;
17345 rd
= (ctx
->opcode
>> 11) & 0x1f;
17347 op1
= MASK_SPECIAL2(ctx
->opcode
);
17349 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17350 case OPC_MSUB
... OPC_MSUBU
:
17351 check_insn(ctx
, ISA_MIPS32
);
17352 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17355 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17358 case OPC_DIVU_G_2F
:
17359 case OPC_MULT_G_2F
:
17360 case OPC_MULTU_G_2F
:
17362 case OPC_MODU_G_2F
:
17363 check_insn(ctx
, INSN_LOONGSON2F
);
17364 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17368 check_insn(ctx
, ISA_MIPS32
);
17369 gen_cl(ctx
, op1
, rd
, rs
);
17372 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17373 gen_helper_do_semihosting(cpu_env
);
17375 /* XXX: not clear which exception should be raised
17376 * when in debug mode...
17378 check_insn(ctx
, ISA_MIPS32
);
17379 generate_exception_end(ctx
, EXCP_DBp
);
17382 #if defined(TARGET_MIPS64)
17385 check_insn(ctx
, ISA_MIPS64
);
17386 check_mips_64(ctx
);
17387 gen_cl(ctx
, op1
, rd
, rs
);
17389 case OPC_DMULT_G_2F
:
17390 case OPC_DMULTU_G_2F
:
17391 case OPC_DDIV_G_2F
:
17392 case OPC_DDIVU_G_2F
:
17393 case OPC_DMOD_G_2F
:
17394 case OPC_DMODU_G_2F
:
17395 check_insn(ctx
, INSN_LOONGSON2F
);
17396 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17399 default: /* Invalid */
17400 MIPS_INVAL("special2_legacy");
17401 generate_exception_end(ctx
, EXCP_RI
);
17406 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17408 int rs
, rt
, rd
, sa
;
17412 rs
= (ctx
->opcode
>> 21) & 0x1f;
17413 rt
= (ctx
->opcode
>> 16) & 0x1f;
17414 rd
= (ctx
->opcode
>> 11) & 0x1f;
17415 sa
= (ctx
->opcode
>> 6) & 0x1f;
17416 imm
= (int16_t)ctx
->opcode
>> 7;
17418 op1
= MASK_SPECIAL3(ctx
->opcode
);
17422 /* hint codes 24-31 are reserved and signal RI */
17423 generate_exception_end(ctx
, EXCP_RI
);
17425 /* Treat as NOP. */
17428 check_cp0_enabled(ctx
);
17429 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17430 gen_cache_operation(ctx
, rt
, rs
, imm
);
17434 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17437 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17442 /* Treat as NOP. */
17445 op2
= MASK_BSHFL(ctx
->opcode
);
17447 case OPC_ALIGN
... OPC_ALIGN_END
:
17448 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17451 gen_bitswap(ctx
, op2
, rd
, rt
);
17456 #if defined(TARGET_MIPS64)
17458 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17461 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17464 check_mips_64(ctx
);
17467 /* Treat as NOP. */
17470 op2
= MASK_DBSHFL(ctx
->opcode
);
17472 case OPC_DALIGN
... OPC_DALIGN_END
:
17473 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17476 gen_bitswap(ctx
, op2
, rd
, rt
);
17483 default: /* Invalid */
17484 MIPS_INVAL("special3_r6");
17485 generate_exception_end(ctx
, EXCP_RI
);
17490 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17495 rs
= (ctx
->opcode
>> 21) & 0x1f;
17496 rt
= (ctx
->opcode
>> 16) & 0x1f;
17497 rd
= (ctx
->opcode
>> 11) & 0x1f;
17499 op1
= MASK_SPECIAL3(ctx
->opcode
);
17501 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17502 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17503 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17504 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17505 * the same mask and op1. */
17506 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17507 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17510 case OPC_ADDUH_R_QB
:
17512 case OPC_ADDQH_R_PH
:
17514 case OPC_ADDQH_R_W
:
17516 case OPC_SUBUH_R_QB
:
17518 case OPC_SUBQH_R_PH
:
17520 case OPC_SUBQH_R_W
:
17521 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17526 case OPC_MULQ_RS_W
:
17527 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17530 MIPS_INVAL("MASK ADDUH.QB");
17531 generate_exception_end(ctx
, EXCP_RI
);
17534 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17535 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17537 generate_exception_end(ctx
, EXCP_RI
);
17541 op2
= MASK_LX(ctx
->opcode
);
17543 #if defined(TARGET_MIPS64)
17549 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17551 default: /* Invalid */
17552 MIPS_INVAL("MASK LX");
17553 generate_exception_end(ctx
, EXCP_RI
);
17557 case OPC_ABSQ_S_PH_DSP
:
17558 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17560 case OPC_ABSQ_S_QB
:
17561 case OPC_ABSQ_S_PH
:
17563 case OPC_PRECEQ_W_PHL
:
17564 case OPC_PRECEQ_W_PHR
:
17565 case OPC_PRECEQU_PH_QBL
:
17566 case OPC_PRECEQU_PH_QBR
:
17567 case OPC_PRECEQU_PH_QBLA
:
17568 case OPC_PRECEQU_PH_QBRA
:
17569 case OPC_PRECEU_PH_QBL
:
17570 case OPC_PRECEU_PH_QBR
:
17571 case OPC_PRECEU_PH_QBLA
:
17572 case OPC_PRECEU_PH_QBRA
:
17573 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17580 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17583 MIPS_INVAL("MASK ABSQ_S.PH");
17584 generate_exception_end(ctx
, EXCP_RI
);
17588 case OPC_ADDU_QB_DSP
:
17589 op2
= MASK_ADDU_QB(ctx
->opcode
);
17592 case OPC_ADDQ_S_PH
:
17595 case OPC_ADDU_S_QB
:
17597 case OPC_ADDU_S_PH
:
17599 case OPC_SUBQ_S_PH
:
17602 case OPC_SUBU_S_QB
:
17604 case OPC_SUBU_S_PH
:
17608 case OPC_RADDU_W_QB
:
17609 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17611 case OPC_MULEU_S_PH_QBL
:
17612 case OPC_MULEU_S_PH_QBR
:
17613 case OPC_MULQ_RS_PH
:
17614 case OPC_MULEQ_S_W_PHL
:
17615 case OPC_MULEQ_S_W_PHR
:
17616 case OPC_MULQ_S_PH
:
17617 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17619 default: /* Invalid */
17620 MIPS_INVAL("MASK ADDU.QB");
17621 generate_exception_end(ctx
, EXCP_RI
);
17626 case OPC_CMPU_EQ_QB_DSP
:
17627 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17629 case OPC_PRECR_SRA_PH_W
:
17630 case OPC_PRECR_SRA_R_PH_W
:
17631 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17633 case OPC_PRECR_QB_PH
:
17634 case OPC_PRECRQ_QB_PH
:
17635 case OPC_PRECRQ_PH_W
:
17636 case OPC_PRECRQ_RS_PH_W
:
17637 case OPC_PRECRQU_S_QB_PH
:
17638 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17640 case OPC_CMPU_EQ_QB
:
17641 case OPC_CMPU_LT_QB
:
17642 case OPC_CMPU_LE_QB
:
17643 case OPC_CMP_EQ_PH
:
17644 case OPC_CMP_LT_PH
:
17645 case OPC_CMP_LE_PH
:
17646 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17648 case OPC_CMPGU_EQ_QB
:
17649 case OPC_CMPGU_LT_QB
:
17650 case OPC_CMPGU_LE_QB
:
17651 case OPC_CMPGDU_EQ_QB
:
17652 case OPC_CMPGDU_LT_QB
:
17653 case OPC_CMPGDU_LE_QB
:
17656 case OPC_PACKRL_PH
:
17657 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17659 default: /* Invalid */
17660 MIPS_INVAL("MASK CMPU.EQ.QB");
17661 generate_exception_end(ctx
, EXCP_RI
);
17665 case OPC_SHLL_QB_DSP
:
17666 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17668 case OPC_DPA_W_PH_DSP
:
17669 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17671 case OPC_DPAU_H_QBL
:
17672 case OPC_DPAU_H_QBR
:
17673 case OPC_DPSU_H_QBL
:
17674 case OPC_DPSU_H_QBR
:
17676 case OPC_DPAX_W_PH
:
17677 case OPC_DPAQ_S_W_PH
:
17678 case OPC_DPAQX_S_W_PH
:
17679 case OPC_DPAQX_SA_W_PH
:
17681 case OPC_DPSX_W_PH
:
17682 case OPC_DPSQ_S_W_PH
:
17683 case OPC_DPSQX_S_W_PH
:
17684 case OPC_DPSQX_SA_W_PH
:
17685 case OPC_MULSAQ_S_W_PH
:
17686 case OPC_DPAQ_SA_L_W
:
17687 case OPC_DPSQ_SA_L_W
:
17688 case OPC_MAQ_S_W_PHL
:
17689 case OPC_MAQ_S_W_PHR
:
17690 case OPC_MAQ_SA_W_PHL
:
17691 case OPC_MAQ_SA_W_PHR
:
17692 case OPC_MULSA_W_PH
:
17693 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17695 default: /* Invalid */
17696 MIPS_INVAL("MASK DPAW.PH");
17697 generate_exception_end(ctx
, EXCP_RI
);
17702 op2
= MASK_INSV(ctx
->opcode
);
17713 t0
= tcg_temp_new();
17714 t1
= tcg_temp_new();
17716 gen_load_gpr(t0
, rt
);
17717 gen_load_gpr(t1
, rs
);
17719 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17725 default: /* Invalid */
17726 MIPS_INVAL("MASK INSV");
17727 generate_exception_end(ctx
, EXCP_RI
);
17731 case OPC_APPEND_DSP
:
17732 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17734 case OPC_EXTR_W_DSP
:
17735 op2
= MASK_EXTR_W(ctx
->opcode
);
17739 case OPC_EXTR_RS_W
:
17741 case OPC_EXTRV_S_H
:
17743 case OPC_EXTRV_R_W
:
17744 case OPC_EXTRV_RS_W
:
17749 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17752 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17758 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17760 default: /* Invalid */
17761 MIPS_INVAL("MASK EXTR.W");
17762 generate_exception_end(ctx
, EXCP_RI
);
17766 #if defined(TARGET_MIPS64)
17767 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17768 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17769 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17770 check_insn(ctx
, INSN_LOONGSON2E
);
17771 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17773 case OPC_ABSQ_S_QH_DSP
:
17774 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17776 case OPC_PRECEQ_L_PWL
:
17777 case OPC_PRECEQ_L_PWR
:
17778 case OPC_PRECEQ_PW_QHL
:
17779 case OPC_PRECEQ_PW_QHR
:
17780 case OPC_PRECEQ_PW_QHLA
:
17781 case OPC_PRECEQ_PW_QHRA
:
17782 case OPC_PRECEQU_QH_OBL
:
17783 case OPC_PRECEQU_QH_OBR
:
17784 case OPC_PRECEQU_QH_OBLA
:
17785 case OPC_PRECEQU_QH_OBRA
:
17786 case OPC_PRECEU_QH_OBL
:
17787 case OPC_PRECEU_QH_OBR
:
17788 case OPC_PRECEU_QH_OBLA
:
17789 case OPC_PRECEU_QH_OBRA
:
17790 case OPC_ABSQ_S_OB
:
17791 case OPC_ABSQ_S_PW
:
17792 case OPC_ABSQ_S_QH
:
17793 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17801 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17803 default: /* Invalid */
17804 MIPS_INVAL("MASK ABSQ_S.QH");
17805 generate_exception_end(ctx
, EXCP_RI
);
17809 case OPC_ADDU_OB_DSP
:
17810 op2
= MASK_ADDU_OB(ctx
->opcode
);
17812 case OPC_RADDU_L_OB
:
17814 case OPC_SUBQ_S_PW
:
17816 case OPC_SUBQ_S_QH
:
17818 case OPC_SUBU_S_OB
:
17820 case OPC_SUBU_S_QH
:
17822 case OPC_SUBUH_R_OB
:
17824 case OPC_ADDQ_S_PW
:
17826 case OPC_ADDQ_S_QH
:
17828 case OPC_ADDU_S_OB
:
17830 case OPC_ADDU_S_QH
:
17832 case OPC_ADDUH_R_OB
:
17833 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17835 case OPC_MULEQ_S_PW_QHL
:
17836 case OPC_MULEQ_S_PW_QHR
:
17837 case OPC_MULEU_S_QH_OBL
:
17838 case OPC_MULEU_S_QH_OBR
:
17839 case OPC_MULQ_RS_QH
:
17840 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17842 default: /* Invalid */
17843 MIPS_INVAL("MASK ADDU.OB");
17844 generate_exception_end(ctx
, EXCP_RI
);
17848 case OPC_CMPU_EQ_OB_DSP
:
17849 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17851 case OPC_PRECR_SRA_QH_PW
:
17852 case OPC_PRECR_SRA_R_QH_PW
:
17853 /* Return value is rt. */
17854 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17856 case OPC_PRECR_OB_QH
:
17857 case OPC_PRECRQ_OB_QH
:
17858 case OPC_PRECRQ_PW_L
:
17859 case OPC_PRECRQ_QH_PW
:
17860 case OPC_PRECRQ_RS_QH_PW
:
17861 case OPC_PRECRQU_S_OB_QH
:
17862 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17864 case OPC_CMPU_EQ_OB
:
17865 case OPC_CMPU_LT_OB
:
17866 case OPC_CMPU_LE_OB
:
17867 case OPC_CMP_EQ_QH
:
17868 case OPC_CMP_LT_QH
:
17869 case OPC_CMP_LE_QH
:
17870 case OPC_CMP_EQ_PW
:
17871 case OPC_CMP_LT_PW
:
17872 case OPC_CMP_LE_PW
:
17873 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17875 case OPC_CMPGDU_EQ_OB
:
17876 case OPC_CMPGDU_LT_OB
:
17877 case OPC_CMPGDU_LE_OB
:
17878 case OPC_CMPGU_EQ_OB
:
17879 case OPC_CMPGU_LT_OB
:
17880 case OPC_CMPGU_LE_OB
:
17881 case OPC_PACKRL_PW
:
17885 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17887 default: /* Invalid */
17888 MIPS_INVAL("MASK CMPU_EQ.OB");
17889 generate_exception_end(ctx
, EXCP_RI
);
17893 case OPC_DAPPEND_DSP
:
17894 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17896 case OPC_DEXTR_W_DSP
:
17897 op2
= MASK_DEXTR_W(ctx
->opcode
);
17904 case OPC_DEXTR_R_L
:
17905 case OPC_DEXTR_RS_L
:
17907 case OPC_DEXTR_R_W
:
17908 case OPC_DEXTR_RS_W
:
17909 case OPC_DEXTR_S_H
:
17911 case OPC_DEXTRV_R_L
:
17912 case OPC_DEXTRV_RS_L
:
17913 case OPC_DEXTRV_S_H
:
17915 case OPC_DEXTRV_R_W
:
17916 case OPC_DEXTRV_RS_W
:
17917 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17922 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17924 default: /* Invalid */
17925 MIPS_INVAL("MASK EXTR.W");
17926 generate_exception_end(ctx
, EXCP_RI
);
17930 case OPC_DPAQ_W_QH_DSP
:
17931 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17933 case OPC_DPAU_H_OBL
:
17934 case OPC_DPAU_H_OBR
:
17935 case OPC_DPSU_H_OBL
:
17936 case OPC_DPSU_H_OBR
:
17938 case OPC_DPAQ_S_W_QH
:
17940 case OPC_DPSQ_S_W_QH
:
17941 case OPC_MULSAQ_S_W_QH
:
17942 case OPC_DPAQ_SA_L_PW
:
17943 case OPC_DPSQ_SA_L_PW
:
17944 case OPC_MULSAQ_S_L_PW
:
17945 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17947 case OPC_MAQ_S_W_QHLL
:
17948 case OPC_MAQ_S_W_QHLR
:
17949 case OPC_MAQ_S_W_QHRL
:
17950 case OPC_MAQ_S_W_QHRR
:
17951 case OPC_MAQ_SA_W_QHLL
:
17952 case OPC_MAQ_SA_W_QHLR
:
17953 case OPC_MAQ_SA_W_QHRL
:
17954 case OPC_MAQ_SA_W_QHRR
:
17955 case OPC_MAQ_S_L_PWL
:
17956 case OPC_MAQ_S_L_PWR
:
17961 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17963 default: /* Invalid */
17964 MIPS_INVAL("MASK DPAQ.W.QH");
17965 generate_exception_end(ctx
, EXCP_RI
);
17969 case OPC_DINSV_DSP
:
17970 op2
= MASK_INSV(ctx
->opcode
);
17981 t0
= tcg_temp_new();
17982 t1
= tcg_temp_new();
17984 gen_load_gpr(t0
, rt
);
17985 gen_load_gpr(t1
, rs
);
17987 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17993 default: /* Invalid */
17994 MIPS_INVAL("MASK DINSV");
17995 generate_exception_end(ctx
, EXCP_RI
);
17999 case OPC_SHLL_OB_DSP
:
18000 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
18003 default: /* Invalid */
18004 MIPS_INVAL("special3_legacy");
18005 generate_exception_end(ctx
, EXCP_RI
);
18010 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
18012 int rs
, rt
, rd
, sa
;
18015 rs
= (ctx
->opcode
>> 21) & 0x1f;
18016 rt
= (ctx
->opcode
>> 16) & 0x1f;
18017 rd
= (ctx
->opcode
>> 11) & 0x1f;
18018 sa
= (ctx
->opcode
>> 6) & 0x1f;
18020 op1
= MASK_SPECIAL3(ctx
->opcode
);
18024 check_insn(ctx
, ISA_MIPS32R2
);
18025 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18028 op2
= MASK_BSHFL(ctx
->opcode
);
18030 case OPC_ALIGN
... OPC_ALIGN_END
:
18032 check_insn(ctx
, ISA_MIPS32R6
);
18033 decode_opc_special3_r6(env
, ctx
);
18036 check_insn(ctx
, ISA_MIPS32R2
);
18037 gen_bshfl(ctx
, op2
, rt
, rd
);
18041 #if defined(TARGET_MIPS64)
18042 case OPC_DEXTM
... OPC_DEXT
:
18043 case OPC_DINSM
... OPC_DINS
:
18044 check_insn(ctx
, ISA_MIPS64R2
);
18045 check_mips_64(ctx
);
18046 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18049 op2
= MASK_DBSHFL(ctx
->opcode
);
18051 case OPC_DALIGN
... OPC_DALIGN_END
:
18053 check_insn(ctx
, ISA_MIPS32R6
);
18054 decode_opc_special3_r6(env
, ctx
);
18057 check_insn(ctx
, ISA_MIPS64R2
);
18058 check_mips_64(ctx
);
18059 op2
= MASK_DBSHFL(ctx
->opcode
);
18060 gen_bshfl(ctx
, op2
, rt
, rd
);
18066 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18069 check_insn(ctx
, ASE_MT
);
18071 TCGv t0
= tcg_temp_new();
18072 TCGv t1
= tcg_temp_new();
18074 gen_load_gpr(t0
, rt
);
18075 gen_load_gpr(t1
, rs
);
18076 gen_helper_fork(t0
, t1
);
18082 check_insn(ctx
, ASE_MT
);
18084 TCGv t0
= tcg_temp_new();
18086 gen_load_gpr(t0
, rs
);
18087 gen_helper_yield(t0
, cpu_env
, t0
);
18088 gen_store_gpr(t0
, rd
);
18093 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18094 decode_opc_special3_r6(env
, ctx
);
18096 decode_opc_special3_legacy(env
, ctx
);
18101 /* MIPS SIMD Architecture (MSA) */
18102 static inline int check_msa_access(DisasContext
*ctx
)
18104 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
18105 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
18106 generate_exception_end(ctx
, EXCP_RI
);
18110 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
18111 if (ctx
->insn_flags
& ASE_MSA
) {
18112 generate_exception_end(ctx
, EXCP_MSADIS
);
18115 generate_exception_end(ctx
, EXCP_RI
);
18122 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
18124 /* generates tcg ops to check if any element is 0 */
18125 /* Note this function only works with MSA_WRLEN = 128 */
18126 uint64_t eval_zero_or_big
= 0;
18127 uint64_t eval_big
= 0;
18128 TCGv_i64 t0
= tcg_temp_new_i64();
18129 TCGv_i64 t1
= tcg_temp_new_i64();
18132 eval_zero_or_big
= 0x0101010101010101ULL
;
18133 eval_big
= 0x8080808080808080ULL
;
18136 eval_zero_or_big
= 0x0001000100010001ULL
;
18137 eval_big
= 0x8000800080008000ULL
;
18140 eval_zero_or_big
= 0x0000000100000001ULL
;
18141 eval_big
= 0x8000000080000000ULL
;
18144 eval_zero_or_big
= 0x0000000000000001ULL
;
18145 eval_big
= 0x8000000000000000ULL
;
18148 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
18149 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
18150 tcg_gen_andi_i64(t0
, t0
, eval_big
);
18151 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
18152 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
18153 tcg_gen_andi_i64(t1
, t1
, eval_big
);
18154 tcg_gen_or_i64(t0
, t0
, t1
);
18155 /* if all bits are zero then all elements are not zero */
18156 /* if some bit is non-zero then some element is zero */
18157 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
18158 tcg_gen_trunc_i64_tl(tresult
, t0
);
18159 tcg_temp_free_i64(t0
);
18160 tcg_temp_free_i64(t1
);
18163 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
18165 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18166 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18167 int64_t s16
= (int16_t)ctx
->opcode
;
18169 check_msa_access(ctx
);
18171 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
18172 generate_exception_end(ctx
, EXCP_RI
);
18179 TCGv_i64 t0
= tcg_temp_new_i64();
18180 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
18181 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
18182 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
18183 tcg_gen_trunc_i64_tl(bcond
, t0
);
18184 tcg_temp_free_i64(t0
);
18191 gen_check_zero_element(bcond
, df
, wt
);
18197 gen_check_zero_element(bcond
, df
, wt
);
18198 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
18202 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
18204 ctx
->hflags
|= MIPS_HFLAG_BC
;
18205 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
18208 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18210 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18211 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18212 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18213 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18215 TCGv_i32 twd
= tcg_const_i32(wd
);
18216 TCGv_i32 tws
= tcg_const_i32(ws
);
18217 TCGv_i32 ti8
= tcg_const_i32(i8
);
18219 switch (MASK_MSA_I8(ctx
->opcode
)) {
18221 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18224 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18227 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18230 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18233 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18236 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18239 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18245 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18246 if (df
== DF_DOUBLE
) {
18247 generate_exception_end(ctx
, EXCP_RI
);
18249 TCGv_i32 tdf
= tcg_const_i32(df
);
18250 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18251 tcg_temp_free_i32(tdf
);
18256 MIPS_INVAL("MSA instruction");
18257 generate_exception_end(ctx
, EXCP_RI
);
18261 tcg_temp_free_i32(twd
);
18262 tcg_temp_free_i32(tws
);
18263 tcg_temp_free_i32(ti8
);
18266 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18268 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18269 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18270 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18271 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18272 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18273 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18275 TCGv_i32 tdf
= tcg_const_i32(df
);
18276 TCGv_i32 twd
= tcg_const_i32(wd
);
18277 TCGv_i32 tws
= tcg_const_i32(ws
);
18278 TCGv_i32 timm
= tcg_temp_new_i32();
18279 tcg_gen_movi_i32(timm
, u5
);
18281 switch (MASK_MSA_I5(ctx
->opcode
)) {
18283 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18286 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18288 case OPC_MAXI_S_df
:
18289 tcg_gen_movi_i32(timm
, s5
);
18290 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18292 case OPC_MAXI_U_df
:
18293 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18295 case OPC_MINI_S_df
:
18296 tcg_gen_movi_i32(timm
, s5
);
18297 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18299 case OPC_MINI_U_df
:
18300 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18303 tcg_gen_movi_i32(timm
, s5
);
18304 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18306 case OPC_CLTI_S_df
:
18307 tcg_gen_movi_i32(timm
, s5
);
18308 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18310 case OPC_CLTI_U_df
:
18311 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18313 case OPC_CLEI_S_df
:
18314 tcg_gen_movi_i32(timm
, s5
);
18315 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18317 case OPC_CLEI_U_df
:
18318 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18322 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18323 tcg_gen_movi_i32(timm
, s10
);
18324 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18328 MIPS_INVAL("MSA instruction");
18329 generate_exception_end(ctx
, EXCP_RI
);
18333 tcg_temp_free_i32(tdf
);
18334 tcg_temp_free_i32(twd
);
18335 tcg_temp_free_i32(tws
);
18336 tcg_temp_free_i32(timm
);
18339 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18341 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18342 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18343 uint32_t df
= 0, m
= 0;
18344 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18345 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18352 if ((dfm
& 0x40) == 0x00) {
18355 } else if ((dfm
& 0x60) == 0x40) {
18358 } else if ((dfm
& 0x70) == 0x60) {
18361 } else if ((dfm
& 0x78) == 0x70) {
18365 generate_exception_end(ctx
, EXCP_RI
);
18369 tdf
= tcg_const_i32(df
);
18370 tm
= tcg_const_i32(m
);
18371 twd
= tcg_const_i32(wd
);
18372 tws
= tcg_const_i32(ws
);
18374 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18376 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18379 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18382 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18385 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18388 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18391 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18393 case OPC_BINSLI_df
:
18394 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18396 case OPC_BINSRI_df
:
18397 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18400 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18403 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18406 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18409 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18412 MIPS_INVAL("MSA instruction");
18413 generate_exception_end(ctx
, EXCP_RI
);
18417 tcg_temp_free_i32(tdf
);
18418 tcg_temp_free_i32(tm
);
18419 tcg_temp_free_i32(twd
);
18420 tcg_temp_free_i32(tws
);
18423 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18425 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18426 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18427 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18428 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18429 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18431 TCGv_i32 tdf
= tcg_const_i32(df
);
18432 TCGv_i32 twd
= tcg_const_i32(wd
);
18433 TCGv_i32 tws
= tcg_const_i32(ws
);
18434 TCGv_i32 twt
= tcg_const_i32(wt
);
18436 switch (MASK_MSA_3R(ctx
->opcode
)) {
18438 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18441 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18444 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18447 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18449 case OPC_SUBS_S_df
:
18450 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18453 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18456 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18459 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18462 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18465 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18467 case OPC_ADDS_A_df
:
18468 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18470 case OPC_SUBS_U_df
:
18471 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18474 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18477 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18480 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18483 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18486 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18489 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18491 case OPC_ADDS_S_df
:
18492 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18494 case OPC_SUBSUS_U_df
:
18495 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18498 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18501 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18504 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18507 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18510 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18513 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18515 case OPC_ADDS_U_df
:
18516 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18518 case OPC_SUBSUU_S_df
:
18519 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18522 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18525 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18528 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18531 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18534 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18536 case OPC_ASUB_S_df
:
18537 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18540 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18543 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18546 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18549 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18552 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18555 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18557 case OPC_ASUB_U_df
:
18558 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18561 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18564 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18567 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18570 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18572 case OPC_AVER_S_df
:
18573 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18576 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18579 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18582 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18585 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18587 case OPC_AVER_U_df
:
18588 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18591 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18594 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18597 case OPC_DOTP_S_df
:
18598 case OPC_DOTP_U_df
:
18599 case OPC_DPADD_S_df
:
18600 case OPC_DPADD_U_df
:
18601 case OPC_DPSUB_S_df
:
18602 case OPC_HADD_S_df
:
18603 case OPC_DPSUB_U_df
:
18604 case OPC_HADD_U_df
:
18605 case OPC_HSUB_S_df
:
18606 case OPC_HSUB_U_df
:
18607 if (df
== DF_BYTE
) {
18608 generate_exception_end(ctx
, EXCP_RI
);
18611 switch (MASK_MSA_3R(ctx
->opcode
)) {
18612 case OPC_DOTP_S_df
:
18613 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18615 case OPC_DOTP_U_df
:
18616 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18618 case OPC_DPADD_S_df
:
18619 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18621 case OPC_DPADD_U_df
:
18622 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18624 case OPC_DPSUB_S_df
:
18625 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18627 case OPC_HADD_S_df
:
18628 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18630 case OPC_DPSUB_U_df
:
18631 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18633 case OPC_HADD_U_df
:
18634 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18636 case OPC_HSUB_S_df
:
18637 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18639 case OPC_HSUB_U_df
:
18640 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18645 MIPS_INVAL("MSA instruction");
18646 generate_exception_end(ctx
, EXCP_RI
);
18649 tcg_temp_free_i32(twd
);
18650 tcg_temp_free_i32(tws
);
18651 tcg_temp_free_i32(twt
);
18652 tcg_temp_free_i32(tdf
);
18655 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18657 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18658 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18659 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18660 TCGv telm
= tcg_temp_new();
18661 TCGv_i32 tsr
= tcg_const_i32(source
);
18662 TCGv_i32 tdt
= tcg_const_i32(dest
);
18664 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18666 gen_load_gpr(telm
, source
);
18667 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18670 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18671 gen_store_gpr(telm
, dest
);
18674 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18677 MIPS_INVAL("MSA instruction");
18678 generate_exception_end(ctx
, EXCP_RI
);
18682 tcg_temp_free(telm
);
18683 tcg_temp_free_i32(tdt
);
18684 tcg_temp_free_i32(tsr
);
18687 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18690 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18691 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18692 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18694 TCGv_i32 tws
= tcg_const_i32(ws
);
18695 TCGv_i32 twd
= tcg_const_i32(wd
);
18696 TCGv_i32 tn
= tcg_const_i32(n
);
18697 TCGv_i32 tdf
= tcg_const_i32(df
);
18699 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18701 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18703 case OPC_SPLATI_df
:
18704 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18707 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18709 case OPC_COPY_S_df
:
18710 case OPC_COPY_U_df
:
18711 case OPC_INSERT_df
:
18712 #if !defined(TARGET_MIPS64)
18713 /* Double format valid only for MIPS64 */
18714 if (df
== DF_DOUBLE
) {
18715 generate_exception_end(ctx
, EXCP_RI
);
18719 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18720 case OPC_COPY_S_df
:
18721 if (likely(wd
!= 0)) {
18722 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18725 case OPC_COPY_U_df
:
18726 if (likely(wd
!= 0)) {
18727 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18730 case OPC_INSERT_df
:
18731 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18736 MIPS_INVAL("MSA instruction");
18737 generate_exception_end(ctx
, EXCP_RI
);
18739 tcg_temp_free_i32(twd
);
18740 tcg_temp_free_i32(tws
);
18741 tcg_temp_free_i32(tn
);
18742 tcg_temp_free_i32(tdf
);
18745 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18747 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18748 uint32_t df
= 0, n
= 0;
18750 if ((dfn
& 0x30) == 0x00) {
18753 } else if ((dfn
& 0x38) == 0x20) {
18756 } else if ((dfn
& 0x3c) == 0x30) {
18759 } else if ((dfn
& 0x3e) == 0x38) {
18762 } else if (dfn
== 0x3E) {
18763 /* CTCMSA, CFCMSA, MOVE.V */
18764 gen_msa_elm_3e(env
, ctx
);
18767 generate_exception_end(ctx
, EXCP_RI
);
18771 gen_msa_elm_df(env
, ctx
, df
, n
);
18774 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18776 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18777 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18778 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18779 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18780 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18782 TCGv_i32 twd
= tcg_const_i32(wd
);
18783 TCGv_i32 tws
= tcg_const_i32(ws
);
18784 TCGv_i32 twt
= tcg_const_i32(wt
);
18785 TCGv_i32 tdf
= tcg_temp_new_i32();
18787 /* adjust df value for floating-point instruction */
18788 tcg_gen_movi_i32(tdf
, df
+ 2);
18790 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18792 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18795 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18798 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18801 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18804 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18807 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18810 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18813 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18816 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18819 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18822 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18825 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18828 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18831 tcg_gen_movi_i32(tdf
, df
+ 1);
18832 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18835 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18838 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18840 case OPC_MADD_Q_df
:
18841 tcg_gen_movi_i32(tdf
, df
+ 1);
18842 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18845 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18847 case OPC_MSUB_Q_df
:
18848 tcg_gen_movi_i32(tdf
, df
+ 1);
18849 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18852 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18855 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18858 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18861 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18864 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18867 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18870 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18873 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18876 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18879 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18882 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18885 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18888 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18890 case OPC_MULR_Q_df
:
18891 tcg_gen_movi_i32(tdf
, df
+ 1);
18892 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18895 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18897 case OPC_FMIN_A_df
:
18898 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18900 case OPC_MADDR_Q_df
:
18901 tcg_gen_movi_i32(tdf
, df
+ 1);
18902 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18905 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18908 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18910 case OPC_MSUBR_Q_df
:
18911 tcg_gen_movi_i32(tdf
, df
+ 1);
18912 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18915 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18917 case OPC_FMAX_A_df
:
18918 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18921 MIPS_INVAL("MSA instruction");
18922 generate_exception_end(ctx
, EXCP_RI
);
18926 tcg_temp_free_i32(twd
);
18927 tcg_temp_free_i32(tws
);
18928 tcg_temp_free_i32(twt
);
18929 tcg_temp_free_i32(tdf
);
18932 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18934 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18935 (op & (0x7 << 18)))
18936 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18937 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18938 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18939 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18940 TCGv_i32 twd
= tcg_const_i32(wd
);
18941 TCGv_i32 tws
= tcg_const_i32(ws
);
18942 TCGv_i32 twt
= tcg_const_i32(wt
);
18943 TCGv_i32 tdf
= tcg_const_i32(df
);
18945 switch (MASK_MSA_2R(ctx
->opcode
)) {
18947 #if !defined(TARGET_MIPS64)
18948 /* Double format valid only for MIPS64 */
18949 if (df
== DF_DOUBLE
) {
18950 generate_exception_end(ctx
, EXCP_RI
);
18954 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18957 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18960 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18963 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18966 MIPS_INVAL("MSA instruction");
18967 generate_exception_end(ctx
, EXCP_RI
);
18971 tcg_temp_free_i32(twd
);
18972 tcg_temp_free_i32(tws
);
18973 tcg_temp_free_i32(twt
);
18974 tcg_temp_free_i32(tdf
);
18977 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18979 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18980 (op & (0xf << 17)))
18981 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18982 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18983 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18984 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18985 TCGv_i32 twd
= tcg_const_i32(wd
);
18986 TCGv_i32 tws
= tcg_const_i32(ws
);
18987 TCGv_i32 twt
= tcg_const_i32(wt
);
18988 /* adjust df value for floating-point instruction */
18989 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18991 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18992 case OPC_FCLASS_df
:
18993 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18995 case OPC_FTRUNC_S_df
:
18996 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18998 case OPC_FTRUNC_U_df
:
18999 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
19002 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
19004 case OPC_FRSQRT_df
:
19005 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
19008 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
19011 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
19014 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
19016 case OPC_FEXUPL_df
:
19017 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
19019 case OPC_FEXUPR_df
:
19020 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
19023 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
19026 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
19028 case OPC_FTINT_S_df
:
19029 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
19031 case OPC_FTINT_U_df
:
19032 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
19034 case OPC_FFINT_S_df
:
19035 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
19037 case OPC_FFINT_U_df
:
19038 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19042 tcg_temp_free_i32(twd
);
19043 tcg_temp_free_i32(tws
);
19044 tcg_temp_free_i32(twt
);
19045 tcg_temp_free_i32(tdf
);
19048 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19050 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19051 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19052 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19053 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19054 TCGv_i32 twd
= tcg_const_i32(wd
);
19055 TCGv_i32 tws
= tcg_const_i32(ws
);
19056 TCGv_i32 twt
= tcg_const_i32(wt
);
19058 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19060 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19063 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19066 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19069 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19072 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19075 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19078 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19081 MIPS_INVAL("MSA instruction");
19082 generate_exception_end(ctx
, EXCP_RI
);
19086 tcg_temp_free_i32(twd
);
19087 tcg_temp_free_i32(tws
);
19088 tcg_temp_free_i32(twt
);
19091 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19093 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19101 gen_msa_vec_v(env
, ctx
);
19104 gen_msa_2r(env
, ctx
);
19107 gen_msa_2rf(env
, ctx
);
19110 MIPS_INVAL("MSA instruction");
19111 generate_exception_end(ctx
, EXCP_RI
);
19116 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
19118 uint32_t opcode
= ctx
->opcode
;
19119 check_insn(ctx
, ASE_MSA
);
19120 check_msa_access(ctx
);
19122 switch (MASK_MSA_MINOR(opcode
)) {
19123 case OPC_MSA_I8_00
:
19124 case OPC_MSA_I8_01
:
19125 case OPC_MSA_I8_02
:
19126 gen_msa_i8(env
, ctx
);
19128 case OPC_MSA_I5_06
:
19129 case OPC_MSA_I5_07
:
19130 gen_msa_i5(env
, ctx
);
19132 case OPC_MSA_BIT_09
:
19133 case OPC_MSA_BIT_0A
:
19134 gen_msa_bit(env
, ctx
);
19136 case OPC_MSA_3R_0D
:
19137 case OPC_MSA_3R_0E
:
19138 case OPC_MSA_3R_0F
:
19139 case OPC_MSA_3R_10
:
19140 case OPC_MSA_3R_11
:
19141 case OPC_MSA_3R_12
:
19142 case OPC_MSA_3R_13
:
19143 case OPC_MSA_3R_14
:
19144 case OPC_MSA_3R_15
:
19145 gen_msa_3r(env
, ctx
);
19148 gen_msa_elm(env
, ctx
);
19150 case OPC_MSA_3RF_1A
:
19151 case OPC_MSA_3RF_1B
:
19152 case OPC_MSA_3RF_1C
:
19153 gen_msa_3rf(env
, ctx
);
19156 gen_msa_vec(env
, ctx
);
19167 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19168 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19169 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19170 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19172 TCGv_i32 twd
= tcg_const_i32(wd
);
19173 TCGv taddr
= tcg_temp_new();
19174 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19176 switch (MASK_MSA_MINOR(opcode
)) {
19178 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19181 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19184 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19187 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19190 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19193 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19196 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19199 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19203 tcg_temp_free_i32(twd
);
19204 tcg_temp_free(taddr
);
19208 MIPS_INVAL("MSA instruction");
19209 generate_exception_end(ctx
, EXCP_RI
);
19215 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19218 int rs
, rt
, rd
, sa
;
19222 /* make sure instructions are on a word boundary */
19223 if (ctx
->pc
& 0x3) {
19224 env
->CP0_BadVAddr
= ctx
->pc
;
19225 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19229 /* Handle blikely not taken case */
19230 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19231 TCGLabel
*l1
= gen_new_label();
19233 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19234 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19235 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
19239 op
= MASK_OP_MAJOR(ctx
->opcode
);
19240 rs
= (ctx
->opcode
>> 21) & 0x1f;
19241 rt
= (ctx
->opcode
>> 16) & 0x1f;
19242 rd
= (ctx
->opcode
>> 11) & 0x1f;
19243 sa
= (ctx
->opcode
>> 6) & 0x1f;
19244 imm
= (int16_t)ctx
->opcode
;
19247 decode_opc_special(env
, ctx
);
19250 decode_opc_special2_legacy(env
, ctx
);
19253 decode_opc_special3(env
, ctx
);
19256 op1
= MASK_REGIMM(ctx
->opcode
);
19258 case OPC_BLTZL
: /* REGIMM branches */
19262 check_insn(ctx
, ISA_MIPS2
);
19263 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19267 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19271 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19273 /* OPC_NAL, OPC_BAL */
19274 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
19276 generate_exception_end(ctx
, EXCP_RI
);
19279 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19282 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
19284 check_insn(ctx
, ISA_MIPS2
);
19285 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19286 gen_trap(ctx
, op1
, rs
, -1, imm
);
19289 check_insn(ctx
, ISA_MIPS32R6
);
19290 generate_exception_end(ctx
, EXCP_RI
);
19293 check_insn(ctx
, ISA_MIPS32R2
);
19294 /* Break the TB to be able to sync copied instructions
19296 ctx
->bstate
= BS_STOP
;
19298 case OPC_BPOSGE32
: /* MIPS DSP branch */
19299 #if defined(TARGET_MIPS64)
19303 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19305 #if defined(TARGET_MIPS64)
19307 check_insn(ctx
, ISA_MIPS32R6
);
19308 check_mips_64(ctx
);
19310 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19314 check_insn(ctx
, ISA_MIPS32R6
);
19315 check_mips_64(ctx
);
19317 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19321 default: /* Invalid */
19322 MIPS_INVAL("regimm");
19323 generate_exception_end(ctx
, EXCP_RI
);
19328 check_cp0_enabled(ctx
);
19329 op1
= MASK_CP0(ctx
->opcode
);
19337 #if defined(TARGET_MIPS64)
19341 #ifndef CONFIG_USER_ONLY
19342 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19343 #endif /* !CONFIG_USER_ONLY */
19345 case OPC_C0_FIRST
... OPC_C0_LAST
:
19346 #ifndef CONFIG_USER_ONLY
19347 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19348 #endif /* !CONFIG_USER_ONLY */
19351 #ifndef CONFIG_USER_ONLY
19354 TCGv t0
= tcg_temp_new();
19356 op2
= MASK_MFMC0(ctx
->opcode
);
19359 check_insn(ctx
, ASE_MT
);
19360 gen_helper_dmt(t0
);
19361 gen_store_gpr(t0
, rt
);
19364 check_insn(ctx
, ASE_MT
);
19365 gen_helper_emt(t0
);
19366 gen_store_gpr(t0
, rt
);
19369 check_insn(ctx
, ASE_MT
);
19370 gen_helper_dvpe(t0
, cpu_env
);
19371 gen_store_gpr(t0
, rt
);
19374 check_insn(ctx
, ASE_MT
);
19375 gen_helper_evpe(t0
, cpu_env
);
19376 gen_store_gpr(t0
, rt
);
19379 check_insn(ctx
, ISA_MIPS32R6
);
19381 gen_helper_dvp(t0
, cpu_env
);
19382 gen_store_gpr(t0
, rt
);
19386 check_insn(ctx
, ISA_MIPS32R6
);
19388 gen_helper_evp(t0
, cpu_env
);
19389 gen_store_gpr(t0
, rt
);
19393 check_insn(ctx
, ISA_MIPS32R2
);
19394 save_cpu_state(ctx
, 1);
19395 gen_helper_di(t0
, cpu_env
);
19396 gen_store_gpr(t0
, rt
);
19397 /* Stop translation as we may have switched
19398 the execution mode. */
19399 ctx
->bstate
= BS_STOP
;
19402 check_insn(ctx
, ISA_MIPS32R2
);
19403 save_cpu_state(ctx
, 1);
19404 gen_helper_ei(t0
, cpu_env
);
19405 gen_store_gpr(t0
, rt
);
19406 /* Stop translation as we may have switched
19407 the execution mode. */
19408 ctx
->bstate
= BS_STOP
;
19410 default: /* Invalid */
19411 MIPS_INVAL("mfmc0");
19412 generate_exception_end(ctx
, EXCP_RI
);
19417 #endif /* !CONFIG_USER_ONLY */
19420 check_insn(ctx
, ISA_MIPS32R2
);
19421 gen_load_srsgpr(rt
, rd
);
19424 check_insn(ctx
, ISA_MIPS32R2
);
19425 gen_store_srsgpr(rt
, rd
);
19429 generate_exception_end(ctx
, EXCP_RI
);
19433 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19434 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19435 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19436 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19439 /* Arithmetic with immediate opcode */
19440 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19444 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19446 case OPC_SLTI
: /* Set on less than with immediate opcode */
19448 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19450 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19451 case OPC_LUI
: /* OPC_AUI */
19454 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19456 case OPC_J
... OPC_JAL
: /* Jump */
19457 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19458 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19461 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19462 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19464 generate_exception_end(ctx
, EXCP_RI
);
19467 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19468 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19471 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19474 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19475 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19477 generate_exception_end(ctx
, EXCP_RI
);
19480 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19481 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19484 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19487 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19490 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19492 check_insn(ctx
, ISA_MIPS32R6
);
19493 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19494 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19497 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19500 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19502 check_insn(ctx
, ISA_MIPS32R6
);
19503 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19504 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19509 check_insn(ctx
, ISA_MIPS2
);
19510 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19514 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19516 case OPC_LL
: /* Load and stores */
19517 check_insn(ctx
, ISA_MIPS2
);
19521 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19523 case OPC_LB
... OPC_LH
:
19524 case OPC_LW
... OPC_LHU
:
19525 gen_ld(ctx
, op
, rt
, rs
, imm
);
19529 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19531 case OPC_SB
... OPC_SH
:
19533 gen_st(ctx
, op
, rt
, rs
, imm
);
19536 check_insn(ctx
, ISA_MIPS2
);
19537 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19538 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19541 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19542 check_cp0_enabled(ctx
);
19543 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19544 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
19545 gen_cache_operation(ctx
, rt
, rs
, imm
);
19547 /* Treat as NOP. */
19550 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19551 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19552 /* Treat as NOP. */
19555 /* Floating point (COP1). */
19560 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19564 op1
= MASK_CP1(ctx
->opcode
);
19569 check_cp1_enabled(ctx
);
19570 check_insn(ctx
, ISA_MIPS32R2
);
19575 check_cp1_enabled(ctx
);
19576 gen_cp1(ctx
, op1
, rt
, rd
);
19578 #if defined(TARGET_MIPS64)
19581 check_cp1_enabled(ctx
);
19582 check_insn(ctx
, ISA_MIPS3
);
19583 check_mips_64(ctx
);
19584 gen_cp1(ctx
, op1
, rt
, rd
);
19587 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19588 check_cp1_enabled(ctx
);
19589 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19591 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19596 check_insn(ctx
, ASE_MIPS3D
);
19597 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19598 (rt
>> 2) & 0x7, imm
<< 2);
19602 check_cp1_enabled(ctx
);
19603 check_insn(ctx
, ISA_MIPS32R6
);
19604 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19608 check_cp1_enabled(ctx
);
19609 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19611 check_insn(ctx
, ASE_MIPS3D
);
19614 check_cp1_enabled(ctx
);
19615 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19616 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19617 (rt
>> 2) & 0x7, imm
<< 2);
19624 check_cp1_enabled(ctx
);
19625 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19631 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19632 check_cp1_enabled(ctx
);
19633 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19635 case R6_OPC_CMP_AF_S
:
19636 case R6_OPC_CMP_UN_S
:
19637 case R6_OPC_CMP_EQ_S
:
19638 case R6_OPC_CMP_UEQ_S
:
19639 case R6_OPC_CMP_LT_S
:
19640 case R6_OPC_CMP_ULT_S
:
19641 case R6_OPC_CMP_LE_S
:
19642 case R6_OPC_CMP_ULE_S
:
19643 case R6_OPC_CMP_SAF_S
:
19644 case R6_OPC_CMP_SUN_S
:
19645 case R6_OPC_CMP_SEQ_S
:
19646 case R6_OPC_CMP_SEUQ_S
:
19647 case R6_OPC_CMP_SLT_S
:
19648 case R6_OPC_CMP_SULT_S
:
19649 case R6_OPC_CMP_SLE_S
:
19650 case R6_OPC_CMP_SULE_S
:
19651 case R6_OPC_CMP_OR_S
:
19652 case R6_OPC_CMP_UNE_S
:
19653 case R6_OPC_CMP_NE_S
:
19654 case R6_OPC_CMP_SOR_S
:
19655 case R6_OPC_CMP_SUNE_S
:
19656 case R6_OPC_CMP_SNE_S
:
19657 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19659 case R6_OPC_CMP_AF_D
:
19660 case R6_OPC_CMP_UN_D
:
19661 case R6_OPC_CMP_EQ_D
:
19662 case R6_OPC_CMP_UEQ_D
:
19663 case R6_OPC_CMP_LT_D
:
19664 case R6_OPC_CMP_ULT_D
:
19665 case R6_OPC_CMP_LE_D
:
19666 case R6_OPC_CMP_ULE_D
:
19667 case R6_OPC_CMP_SAF_D
:
19668 case R6_OPC_CMP_SUN_D
:
19669 case R6_OPC_CMP_SEQ_D
:
19670 case R6_OPC_CMP_SEUQ_D
:
19671 case R6_OPC_CMP_SLT_D
:
19672 case R6_OPC_CMP_SULT_D
:
19673 case R6_OPC_CMP_SLE_D
:
19674 case R6_OPC_CMP_SULE_D
:
19675 case R6_OPC_CMP_OR_D
:
19676 case R6_OPC_CMP_UNE_D
:
19677 case R6_OPC_CMP_NE_D
:
19678 case R6_OPC_CMP_SOR_D
:
19679 case R6_OPC_CMP_SUNE_D
:
19680 case R6_OPC_CMP_SNE_D
:
19681 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19684 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19685 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19690 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19705 check_insn(ctx
, ASE_MSA
);
19706 gen_msa_branch(env
, ctx
, op1
);
19710 generate_exception_end(ctx
, EXCP_RI
);
19715 /* Compact branches [R6] and COP2 [non-R6] */
19716 case OPC_BC
: /* OPC_LWC2 */
19717 case OPC_BALC
: /* OPC_SWC2 */
19718 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19719 /* OPC_BC, OPC_BALC */
19720 gen_compute_compact_branch(ctx
, op
, 0, 0,
19721 sextract32(ctx
->opcode
<< 2, 0, 28));
19723 /* OPC_LWC2, OPC_SWC2 */
19724 /* COP2: Not implemented. */
19725 generate_exception_err(ctx
, EXCP_CpU
, 2);
19728 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
19729 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
19730 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19732 /* OPC_BEQZC, OPC_BNEZC */
19733 gen_compute_compact_branch(ctx
, op
, rs
, 0,
19734 sextract32(ctx
->opcode
<< 2, 0, 23));
19736 /* OPC_JIC, OPC_JIALC */
19737 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
19740 /* OPC_LWC2, OPC_SWC2 */
19741 /* COP2: Not implemented. */
19742 generate_exception_err(ctx
, EXCP_CpU
, 2);
19746 check_insn(ctx
, INSN_LOONGSON2F
);
19747 /* Note that these instructions use different fields. */
19748 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
19752 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19753 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19754 check_cp1_enabled(ctx
);
19755 op1
= MASK_CP3(ctx
->opcode
);
19759 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19765 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19766 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
19769 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19770 /* Treat as NOP. */
19773 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19787 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19788 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
19792 generate_exception_end(ctx
, EXCP_RI
);
19796 generate_exception_err(ctx
, EXCP_CpU
, 1);
19800 #if defined(TARGET_MIPS64)
19801 /* MIPS64 opcodes */
19802 case OPC_LDL
... OPC_LDR
:
19804 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19808 check_insn(ctx
, ISA_MIPS3
);
19809 check_mips_64(ctx
);
19810 gen_ld(ctx
, op
, rt
, rs
, imm
);
19812 case OPC_SDL
... OPC_SDR
:
19813 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19816 check_insn(ctx
, ISA_MIPS3
);
19817 check_mips_64(ctx
);
19818 gen_st(ctx
, op
, rt
, rs
, imm
);
19821 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19822 check_insn(ctx
, ISA_MIPS3
);
19823 check_mips_64(ctx
);
19824 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19826 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
19827 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19828 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
19829 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19832 check_insn(ctx
, ISA_MIPS3
);
19833 check_mips_64(ctx
);
19834 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19838 check_insn(ctx
, ISA_MIPS3
);
19839 check_mips_64(ctx
);
19840 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19843 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
19844 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19845 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19847 MIPS_INVAL("major opcode");
19848 generate_exception_end(ctx
, EXCP_RI
);
19852 case OPC_DAUI
: /* OPC_JALX */
19853 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19854 #if defined(TARGET_MIPS64)
19856 check_mips_64(ctx
);
19858 generate_exception(ctx
, EXCP_RI
);
19859 } else if (rt
!= 0) {
19860 TCGv t0
= tcg_temp_new();
19861 gen_load_gpr(t0
, rs
);
19862 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
19866 generate_exception_end(ctx
, EXCP_RI
);
19867 MIPS_INVAL("major opcode");
19871 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
19872 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19873 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19876 case OPC_MSA
: /* OPC_MDMX */
19877 /* MDMX: Not implemented. */
19881 check_insn(ctx
, ISA_MIPS32R6
);
19882 gen_pcrel(ctx
, ctx
->opcode
, ctx
->pc
, rs
);
19884 default: /* Invalid */
19885 MIPS_INVAL("major opcode");
19886 generate_exception_end(ctx
, EXCP_RI
);
19891 void gen_intermediate_code(CPUMIPSState
*env
, struct TranslationBlock
*tb
)
19893 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19894 CPUState
*cs
= CPU(cpu
);
19896 target_ulong pc_start
;
19897 target_ulong next_page_start
;
19904 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
19907 ctx
.singlestep_enabled
= cs
->singlestep_enabled
;
19908 ctx
.insn_flags
= env
->insn_flags
;
19909 ctx
.CP0_Config1
= env
->CP0_Config1
;
19911 ctx
.bstate
= BS_NONE
;
19913 ctx
.kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
19914 ctx
.rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
19915 ctx
.ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
19916 ctx
.bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
19917 ctx
.bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
19918 ctx
.PAMask
= env
->PAMask
;
19919 ctx
.mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
19920 ctx
.CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
19921 ctx
.cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
19922 /* Restore delay slot state from the tb context. */
19923 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
19924 ctx
.ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
19925 ctx
.ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
19926 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
19927 ctx
.vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
19928 ctx
.mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
19929 ctx
.nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
19930 ctx
.abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
19931 restore_cpu_state(env
, &ctx
);
19932 #ifdef CONFIG_USER_ONLY
19933 ctx
.mem_idx
= MIPS_HFLAG_UM
;
19935 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
19937 ctx
.default_tcg_memop_mask
= (ctx
.insn_flags
& ISA_MIPS32R6
) ?
19938 MO_UNALN
: MO_ALIGN
;
19940 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
19941 if (max_insns
== 0) {
19942 max_insns
= CF_COUNT_MASK
;
19944 if (max_insns
> TCG_MAX_INSNS
) {
19945 max_insns
= TCG_MAX_INSNS
;
19948 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
19950 while (ctx
.bstate
== BS_NONE
) {
19951 tcg_gen_insn_start(ctx
.pc
, ctx
.hflags
& MIPS_HFLAG_BMASK
, ctx
.btarget
);
19954 if (unlikely(cpu_breakpoint_test(cs
, ctx
.pc
, BP_ANY
))) {
19955 save_cpu_state(&ctx
, 1);
19956 ctx
.bstate
= BS_BRANCH
;
19957 gen_helper_raise_exception_debug(cpu_env
);
19958 /* The address covered by the breakpoint must be included in
19959 [tb->pc, tb->pc + tb->size) in order to for it to be
19960 properly cleared -- thus we increment the PC here so that
19961 the logic setting tb->size below does the right thing. */
19963 goto done_generating
;
19966 if (num_insns
== max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
19970 is_slot
= ctx
.hflags
& MIPS_HFLAG_BMASK
;
19971 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
19972 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
19974 decode_opc(env
, &ctx
);
19975 } else if (ctx
.insn_flags
& ASE_MICROMIPS
) {
19976 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19977 insn_bytes
= decode_micromips_opc(env
, &ctx
);
19978 } else if (ctx
.insn_flags
& ASE_MIPS16
) {
19979 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19980 insn_bytes
= decode_mips16_opc(env
, &ctx
);
19982 generate_exception_end(&ctx
, EXCP_RI
);
19986 if (ctx
.hflags
& MIPS_HFLAG_BMASK
) {
19987 if (!(ctx
.hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
19988 MIPS_HFLAG_FBNSLOT
))) {
19989 /* force to generate branch as there is neither delay nor
19993 if ((ctx
.hflags
& MIPS_HFLAG_M16
) &&
19994 (ctx
.hflags
& MIPS_HFLAG_FBNSLOT
)) {
19995 /* Force to generate branch as microMIPS R6 doesn't restrict
19996 branches in the forbidden slot. */
20001 gen_branch(&ctx
, insn_bytes
);
20003 ctx
.pc
+= insn_bytes
;
20005 /* Execute a branch and its delay slot as a single instruction.
20006 This is what GDB expects and is consistent with what the
20007 hardware does (e.g. if a delay slot instruction faults, the
20008 reported PC is the PC of the branch). */
20009 if (cs
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0) {
20013 if (ctx
.pc
>= next_page_start
) {
20017 if (tcg_op_buf_full()) {
20021 if (num_insns
>= max_insns
)
20027 if (tb
->cflags
& CF_LAST_IO
) {
20030 if (cs
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
20031 save_cpu_state(&ctx
, ctx
.bstate
!= BS_EXCP
);
20032 gen_helper_raise_exception_debug(cpu_env
);
20034 switch (ctx
.bstate
) {
20036 gen_goto_tb(&ctx
, 0, ctx
.pc
);
20039 save_cpu_state(&ctx
, 0);
20040 gen_goto_tb(&ctx
, 0, ctx
.pc
);
20043 tcg_gen_exit_tb(0);
20051 gen_tb_end(tb
, num_insns
);
20053 tb
->size
= ctx
.pc
- pc_start
;
20054 tb
->icount
= num_insns
;
20058 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)
20059 && qemu_log_in_addr_range(pc_start
)) {
20061 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
20062 log_target_disas(cs
, pc_start
, ctx
.pc
- pc_start
, 0);
20069 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
20073 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
20075 #define printfpr(fp) \
20078 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
20079 " fd:%13g fs:%13g psu: %13g\n", \
20080 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
20081 (double)(fp)->fd, \
20082 (double)(fp)->fs[FP_ENDIAN_IDX], \
20083 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
20086 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
20087 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
20088 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
20089 " fd:%13g fs:%13g psu:%13g\n", \
20090 tmp.w[FP_ENDIAN_IDX], tmp.d, \
20092 (double)tmp.fs[FP_ENDIAN_IDX], \
20093 (double)tmp.fs[!FP_ENDIAN_IDX]); \
20098 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
20099 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
20100 get_float_exception_flags(&env
->active_fpu
.fp_status
));
20101 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
20102 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
20103 printfpr(&env
->active_fpu
.fpr
[i
]);
20109 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
20112 MIPSCPU
*cpu
= MIPS_CPU(cs
);
20113 CPUMIPSState
*env
= &cpu
->env
;
20116 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
20117 " LO=0x" TARGET_FMT_lx
" ds %04x "
20118 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
20119 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
20120 env
->hflags
, env
->btarget
, env
->bcond
);
20121 for (i
= 0; i
< 32; i
++) {
20123 cpu_fprintf(f
, "GPR%02d:", i
);
20124 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
20126 cpu_fprintf(f
, "\n");
20129 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
20130 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
20131 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
20133 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
20134 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
20135 env
->CP0_Config2
, env
->CP0_Config3
);
20136 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
20137 env
->CP0_Config4
, env
->CP0_Config5
);
20138 if (env
->hflags
& MIPS_HFLAG_FPU
)
20139 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
20142 void mips_tcg_init(void)
20147 /* Initialize various static tables. */
20151 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
20152 tcg_ctx
.tcg_env
= cpu_env
;
20154 TCGV_UNUSED(cpu_gpr
[0]);
20155 for (i
= 1; i
< 32; i
++)
20156 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
20157 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
20160 for (i
= 0; i
< 32; i
++) {
20161 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
20163 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
20164 /* The scalar floating-point unit (FPU) registers are mapped on
20165 * the MSA vector registers. */
20166 fpu_f64
[i
] = msa_wr_d
[i
* 2];
20167 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
20168 msa_wr_d
[i
* 2 + 1] =
20169 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
20172 cpu_PC
= tcg_global_mem_new(cpu_env
,
20173 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
20174 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
20175 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
20176 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
20178 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
20179 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
20182 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
20183 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
20185 bcond
= tcg_global_mem_new(cpu_env
,
20186 offsetof(CPUMIPSState
, bcond
), "bcond");
20187 btarget
= tcg_global_mem_new(cpu_env
,
20188 offsetof(CPUMIPSState
, btarget
), "btarget");
20189 hflags
= tcg_global_mem_new_i32(cpu_env
,
20190 offsetof(CPUMIPSState
, hflags
), "hflags");
20192 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
20193 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
20195 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
20196 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
20202 #include "translate_init.c"
20204 MIPSCPU
*cpu_mips_init(const char *cpu_model
)
20208 const mips_def_t
*def
;
20210 def
= cpu_mips_find_by_name(cpu_model
);
20213 cpu
= MIPS_CPU(object_new(TYPE_MIPS_CPU
));
20215 env
->cpu_model
= def
;
20216 env
->exception_base
= (int32_t)0xBFC00000;
20218 #ifndef CONFIG_USER_ONLY
20219 mmu_init(env
, def
);
20221 fpu_init(env
, def
);
20222 mvp_init(env
, def
);
20224 object_property_set_bool(OBJECT(cpu
), true, "realized", NULL
);
20229 bool cpu_supports_cps_smp(const char *cpu_model
)
20231 const mips_def_t
*def
= cpu_mips_find_by_name(cpu_model
);
20236 return (def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
20239 bool cpu_supports_isa(const char *cpu_model
, unsigned int isa
)
20241 const mips_def_t
*def
= cpu_mips_find_by_name(cpu_model
);
20246 return (def
->insn_flags
& isa
) != 0;
20249 void cpu_set_exception_base(int vp_index
, target_ulong address
)
20251 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
20252 vp
->env
.exception_base
= address
;
20255 void cpu_state_reset(CPUMIPSState
*env
)
20257 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
20258 CPUState
*cs
= CPU(cpu
);
20260 /* Reset registers to their default values */
20261 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
20262 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
20263 #ifdef TARGET_WORDS_BIGENDIAN
20264 env
->CP0_Config0
|= (1 << CP0C0_BE
);
20266 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
20267 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
20268 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
20269 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
20270 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
20271 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
20272 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
20273 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
20274 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
20275 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
20276 << env
->cpu_model
->CP0_LLAddr_shift
;
20277 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
20278 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
20279 env
->CCRes
= env
->cpu_model
->CCRes
;
20280 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
20281 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
20282 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
20283 env
->current_tc
= 0;
20284 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
20285 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
20286 #if defined(TARGET_MIPS64)
20287 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
20288 env
->SEGMask
|= 3ULL << 62;
20291 env
->PABITS
= env
->cpu_model
->PABITS
;
20292 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
20293 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
20294 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
20295 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
20296 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
20297 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
20298 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
20299 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
20300 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
20301 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
20302 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
20303 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
20304 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
20305 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
20306 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
20307 env
->msair
= env
->cpu_model
->MSAIR
;
20308 env
->insn_flags
= env
->cpu_model
->insn_flags
;
20310 #if defined(CONFIG_USER_ONLY)
20311 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
20312 # ifdef TARGET_MIPS64
20313 /* Enable 64-bit register mode. */
20314 env
->CP0_Status
|= (1 << CP0St_PX
);
20316 # ifdef TARGET_ABI_MIPSN64
20317 /* Enable 64-bit address mode. */
20318 env
->CP0_Status
|= (1 << CP0St_UX
);
20320 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
20321 hardware registers. */
20322 env
->CP0_HWREna
|= 0x0000000F;
20323 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
20324 env
->CP0_Status
|= (1 << CP0St_CU1
);
20326 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
20327 env
->CP0_Status
|= (1 << CP0St_MX
);
20329 # if defined(TARGET_MIPS64)
20330 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
20331 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
20332 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
20333 env
->CP0_Status
|= (1 << CP0St_FR
);
20337 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
20338 /* If the exception was raised from a delay slot,
20339 come back to the jump. */
20340 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
20341 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
20343 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
20345 env
->active_tc
.PC
= env
->exception_base
;
20346 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
20347 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
20348 env
->CP0_Wired
= 0;
20349 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
20350 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
20351 if (kvm_enabled()) {
20352 env
->CP0_EBase
|= 0x40000000;
20354 env
->CP0_EBase
|= 0x80000000;
20356 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
20357 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
20359 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
20361 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
20362 /* vectored interrupts not implemented, timer on int 7,
20363 no performance counters. */
20364 env
->CP0_IntCtl
= 0xe0000000;
20368 for (i
= 0; i
< 7; i
++) {
20369 env
->CP0_WatchLo
[i
] = 0;
20370 env
->CP0_WatchHi
[i
] = 0x80000000;
20372 env
->CP0_WatchLo
[7] = 0;
20373 env
->CP0_WatchHi
[7] = 0;
20375 /* Count register increments in debug mode, EJTAG version 1 */
20376 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20378 cpu_mips_store_count(env
, 1);
20380 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20383 /* Only TC0 on VPE 0 starts as active. */
20384 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20385 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20386 env
->tcs
[i
].CP0_TCHalt
= 1;
20388 env
->active_tc
.CP0_TCHalt
= 1;
20391 if (cs
->cpu_index
== 0) {
20392 /* VPE0 starts up enabled. */
20393 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20394 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20396 /* TC0 starts up unhalted. */
20398 env
->active_tc
.CP0_TCHalt
= 0;
20399 env
->tcs
[0].CP0_TCHalt
= 0;
20400 /* With thread 0 active. */
20401 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20402 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20406 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20407 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20408 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20409 env
->CP0_Status
|= (1 << CP0St_FR
);
20413 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20417 compute_hflags(env
);
20418 restore_fp_status(env
);
20419 restore_pamask(env
);
20420 cs
->exception_index
= EXCP_NONE
;
20422 if (semihosting_get_argc()) {
20423 /* UHI interface can be used to obtain argc and argv */
20424 env
->active_tc
.gpr
[4] = -1;
20428 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20429 target_ulong
*data
)
20431 env
->active_tc
.PC
= data
[0];
20432 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20433 env
->hflags
|= data
[1];
20434 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20435 case MIPS_HFLAG_BR
:
20437 case MIPS_HFLAG_BC
:
20438 case MIPS_HFLAG_BL
:
20440 env
->btarget
= data
[2];