/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/cpu_ldst.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"
#include "trace-tcg.h"
/* Enable extra disassembly debug output when non-zero. */
#define MIPS_DEBUG_DISAS 0

/*
 * MIPS major opcodes: the top 6 bits (31..26) of the instruction word.
 * MASK_OP_MAJOR extracts that field so an opcode can be compared against
 * the OPC_* constants below.
 */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches: several aliases share a major opcode and are
       disambiguated later by their register fields. */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
150 /* PC-relative address computation / loads */
151 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
152 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
154 /* Instructions determined by bits 19 and 20 */
155 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
156 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
157 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
159 /* Instructions determined by bits 16 ... 20 */
160 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
161 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
164 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
167 /* MIPS special opcodes */
168 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
172 OPC_SLL
= 0x00 | OPC_SPECIAL
,
173 /* NOP is SLL r0, r0, 0 */
174 /* SSNOP is SLL r0, r0, 1 */
175 /* EHB is SLL r0, r0, 3 */
176 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
177 OPC_ROTR
= OPC_SRL
| (1 << 21),
178 OPC_SRA
= 0x03 | OPC_SPECIAL
,
179 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
180 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
181 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
182 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
183 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
184 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
185 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
186 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
187 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
188 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
189 OPC_DROTR
= OPC_DSRL
| (1 << 21),
190 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
191 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
192 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
193 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
194 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
195 /* Multiplication / division */
196 OPC_MULT
= 0x18 | OPC_SPECIAL
,
197 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
198 OPC_DIV
= 0x1A | OPC_SPECIAL
,
199 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
200 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
201 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
202 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
203 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
205 /* 2 registers arithmetic / logic */
206 OPC_ADD
= 0x20 | OPC_SPECIAL
,
207 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
208 OPC_SUB
= 0x22 | OPC_SPECIAL
,
209 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
210 OPC_AND
= 0x24 | OPC_SPECIAL
,
211 OPC_OR
= 0x25 | OPC_SPECIAL
,
212 OPC_XOR
= 0x26 | OPC_SPECIAL
,
213 OPC_NOR
= 0x27 | OPC_SPECIAL
,
214 OPC_SLT
= 0x2A | OPC_SPECIAL
,
215 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
216 OPC_DADD
= 0x2C | OPC_SPECIAL
,
217 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
218 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
219 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
221 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
222 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
224 OPC_TGE
= 0x30 | OPC_SPECIAL
,
225 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
226 OPC_TLT
= 0x32 | OPC_SPECIAL
,
227 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
228 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
229 OPC_TNE
= 0x36 | OPC_SPECIAL
,
230 /* HI / LO registers load & stores */
231 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
232 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
233 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
234 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
235 /* Conditional moves */
236 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
237 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
239 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
240 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
242 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
245 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
246 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
247 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
248 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
249 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
251 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
252 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
253 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
254 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
257 /* R6 Multiply and Divide instructions have the same Opcode
258 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
259 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
262 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
263 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
264 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
265 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
266 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
267 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
268 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
269 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
271 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
272 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
273 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
274 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
275 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
276 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
277 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
278 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
280 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
281 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
282 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
283 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
284 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
286 OPC_LSA
= 0x05 | OPC_SPECIAL
,
287 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
290 /* Multiplication variants of the vr54xx. */
291 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
294 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
295 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
296 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
297 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
298 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
299 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
301 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
303 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
304 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
305 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
306 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
307 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
310 /* REGIMM (rt field) opcodes */
311 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
314 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
315 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
316 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
317 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
318 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
319 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
320 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
321 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
322 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
323 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
324 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
325 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
326 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
327 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
328 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
329 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
331 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
332 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
335 /* Special2 opcodes */
336 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
339 /* Multiply & xxx operations */
340 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
341 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
342 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
343 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
344 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
346 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
347 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
348 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
349 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
350 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
351 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
352 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
353 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
354 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
355 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
356 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
357 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
359 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
360 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
361 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
362 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
364 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
367 /* Special3 opcodes */
368 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
371 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
372 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
373 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
374 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
375 OPC_INS
= 0x04 | OPC_SPECIAL3
,
376 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
377 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
378 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
379 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
380 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
381 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
382 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
383 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
386 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
387 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
388 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
389 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
390 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
391 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
392 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
393 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
394 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
395 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
396 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
397 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
400 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
401 /* MIPS DSP Arithmetic */
402 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
403 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
404 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
405 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
406 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
407 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
408 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
409 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
410 /* MIPS DSP GPR-Based Shift Sub-class */
411 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
412 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
413 /* MIPS DSP Multiply Sub-class insns */
414 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
415 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
416 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
417 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
418 /* DSP Bit/Manipulation Sub-class */
419 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
420 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
421 /* MIPS DSP Append Sub-class */
422 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
423 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
424 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
425 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
426 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
429 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
430 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
431 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
432 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
433 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
434 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
438 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
441 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
442 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
443 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
444 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
445 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
446 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
450 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
453 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
454 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
455 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
456 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
457 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
460 /* MIPS DSP REGIMM opcodes */
462 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
463 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
466 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
469 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
470 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
471 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
472 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
475 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
477 /* MIPS DSP Arithmetic Sub-class */
478 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
479 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
480 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
481 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
482 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
483 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
484 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
485 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
486 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
487 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
488 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
489 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
490 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
491 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
492 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
493 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
494 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
495 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
496 /* MIPS DSP Multiply Sub-class insns */
497 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
498 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
499 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
500 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
501 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
502 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
505 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
506 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
508 /* MIPS DSP Arithmetic Sub-class */
509 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
510 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
511 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
512 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
513 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
514 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
515 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
516 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
517 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
518 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
519 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
520 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
521 /* MIPS DSP Multiply Sub-class insns */
522 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
523 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
524 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
525 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
528 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
532 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
533 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
534 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
535 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
536 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
537 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
538 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
539 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
540 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
541 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
542 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
543 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
544 /* DSP Bit/Manipulation Sub-class */
545 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
546 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
547 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
548 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
549 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
552 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
554 /* MIPS DSP Arithmetic Sub-class */
555 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
556 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
557 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
558 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
559 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
560 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
561 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
562 /* DSP Compare-Pick Sub-class */
563 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
564 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
565 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
566 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
567 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
568 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
569 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
570 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
571 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
572 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
573 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
574 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
575 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
576 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
577 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
580 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
582 /* MIPS DSP GPR-Based Shift Sub-class */
583 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
584 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
585 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
586 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
587 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
588 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
589 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
590 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
591 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
592 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
593 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
594 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
595 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
596 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
597 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
598 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
599 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
600 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
601 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
602 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
603 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
604 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
607 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
609 /* MIPS DSP Multiply Sub-class insns */
610 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
611 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
612 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
613 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
614 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
615 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
616 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
617 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
618 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
619 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
620 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
621 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
622 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
623 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
624 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
625 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
626 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
627 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
628 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
629 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
630 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
631 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
634 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
636 /* DSP Bit/Manipulation Sub-class */
637 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
640 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
642 /* MIPS DSP Append Sub-class */
643 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
644 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
645 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
648 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
650 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
651 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
652 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
653 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
654 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
655 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
656 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
657 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
658 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
659 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
660 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
661 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
662 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
663 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
664 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
665 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
666 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
667 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
670 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Arithmetic Sub-class */
673 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
674 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
675 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
676 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
677 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
678 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
679 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
680 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
681 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
682 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
683 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
684 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
685 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
686 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
687 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
688 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
689 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
690 /* DSP Bit/Manipulation Sub-class */
691 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
692 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
693 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
694 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
695 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
696 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
699 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
701 /* MIPS DSP Multiply Sub-class insns */
702 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
703 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
704 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
705 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
706 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
707 /* MIPS DSP Arithmetic Sub-class */
708 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
709 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
710 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
711 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
712 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
713 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
714 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
715 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
716 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
717 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
718 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
719 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
720 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
721 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
722 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
723 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
724 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
725 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
726 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
727 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
728 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
731 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
733 /* DSP Compare-Pick Sub-class */
734 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
735 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
736 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
737 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
738 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
739 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
740 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
741 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
742 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
743 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
744 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
745 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
746 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
747 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
748 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
749 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
750 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
751 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
752 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
753 /* MIPS DSP Arithmetic Sub-class */
754 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
755 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
756 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
759 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
760 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
761 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
764 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
766 /* DSP Append Sub-class */
767 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
768 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
769 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
770 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
773 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
775 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
776 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
777 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
778 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
779 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
780 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
781 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
782 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
783 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
784 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
785 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
786 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
787 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
788 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
789 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
790 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
791 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
792 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
793 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
794 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
795 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
796 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
799 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
801 /* DSP Bit/Manipulation Sub-class */
802 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
805 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
807 /* MIPS DSP Multiply Sub-class insns */
808 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
809 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
810 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
811 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
812 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
813 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
814 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
815 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
816 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
817 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
818 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
819 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
820 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
821 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
822 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
823 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
824 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
825 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
826 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
827 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
828 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
831 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
832 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
833 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
836 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
838 /* MIPS DSP GPR-Based Shift Sub-class */
839 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
840 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
841 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
842 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
843 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
844 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
845 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
846 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
847 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
848 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
849 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
850 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
851 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
852 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
853 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
854 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
855 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
856 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
857 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
858 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
859 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
862 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
863 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
864 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
867 /* Coprocessor 0 (rs field) */
868 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
871 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
872 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
873 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
874 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
875 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
876 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
877 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
878 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
879 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
880 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
881 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
882 OPC_C0
= (0x10 << 21) | OPC_CP0
,
883 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
884 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
888 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
891 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
892 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
893 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
894 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
895 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
896 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
897 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
898 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
901 /* Coprocessor 0 (with rs == C0) */
902 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
905 OPC_TLBR
= 0x01 | OPC_C0
,
906 OPC_TLBWI
= 0x02 | OPC_C0
,
907 OPC_TLBINV
= 0x03 | OPC_C0
,
908 OPC_TLBINVF
= 0x04 | OPC_C0
,
909 OPC_TLBWR
= 0x06 | OPC_C0
,
910 OPC_TLBP
= 0x08 | OPC_C0
,
911 OPC_RFE
= 0x10 | OPC_C0
,
912 OPC_ERET
= 0x18 | OPC_C0
,
913 OPC_DERET
= 0x1F | OPC_C0
,
914 OPC_WAIT
= 0x20 | OPC_C0
,
917 /* Coprocessor 1 (rs field) */
918 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
920 /* Values for the fmt field in FP instructions */
922 /* 0 - 15 are reserved */
923 FMT_S
= 16, /* single fp */
924 FMT_D
= 17, /* double fp */
925 FMT_E
= 18, /* extended fp */
926 FMT_Q
= 19, /* quad fp */
927 FMT_W
= 20, /* 32-bit fixed */
928 FMT_L
= 21, /* 64-bit fixed */
929 FMT_PS
= 22, /* paired single fp */
930 /* 23 - 31 are reserved */
934 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
935 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
936 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
937 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
938 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
939 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
940 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
941 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
942 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
943 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
944 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
945 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
946 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
947 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
948 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
949 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
950 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
951 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
952 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
953 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
954 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
955 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
956 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
957 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
958 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
959 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
960 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
961 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
962 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
963 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
966 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
967 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
970 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
971 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
972 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
973 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
977 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
978 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
982 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
983 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
986 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
989 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
990 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
991 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
992 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
993 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
994 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
995 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
996 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
997 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
998 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
999 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1002 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1005 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1010 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1011 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1012 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1014 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1019 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1020 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1021 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1023 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1024 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1025 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1026 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1027 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1028 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1029 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1030 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1032 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1037 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1038 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1039 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1041 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1042 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1044 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1045 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1046 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1048 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1049 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1051 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1052 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1053 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1055 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1056 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1058 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1059 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1060 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1062 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1063 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1065 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1066 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1067 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1069 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1070 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1072 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1073 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1074 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1076 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1077 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1079 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1080 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1081 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1083 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1084 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1086 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1087 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1088 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1090 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1091 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1092 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1093 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1094 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1095 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1099 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1102 OPC_LWXC1
= 0x00 | OPC_CP3
,
1103 OPC_LDXC1
= 0x01 | OPC_CP3
,
1104 OPC_LUXC1
= 0x05 | OPC_CP3
,
1105 OPC_SWXC1
= 0x08 | OPC_CP3
,
1106 OPC_SDXC1
= 0x09 | OPC_CP3
,
1107 OPC_SUXC1
= 0x0D | OPC_CP3
,
1108 OPC_PREFX
= 0x0F | OPC_CP3
,
1109 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1110 OPC_MADD_S
= 0x20 | OPC_CP3
,
1111 OPC_MADD_D
= 0x21 | OPC_CP3
,
1112 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1113 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1114 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1115 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1116 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1117 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1118 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1119 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1120 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1121 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1125 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1127 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1128 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1129 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1130 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1131 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1132 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1133 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1134 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1135 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1136 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1137 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1138 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1139 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1140 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1141 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1142 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1143 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1144 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1145 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1146 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1147 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1149 /* MI10 instruction */
1150 OPC_LD_B
= (0x20) | OPC_MSA
,
1151 OPC_LD_H
= (0x21) | OPC_MSA
,
1152 OPC_LD_W
= (0x22) | OPC_MSA
,
1153 OPC_LD_D
= (0x23) | OPC_MSA
,
1154 OPC_ST_B
= (0x24) | OPC_MSA
,
1155 OPC_ST_H
= (0x25) | OPC_MSA
,
1156 OPC_ST_W
= (0x26) | OPC_MSA
,
1157 OPC_ST_D
= (0x27) | OPC_MSA
,
1161 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1162 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1163 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1164 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1165 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1166 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1167 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1168 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1169 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1170 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1171 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1172 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1173 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1175 /* I8 instruction */
1176 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1177 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1178 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1179 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1180 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1181 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1182 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1183 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1184 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1185 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1187 /* VEC/2R/2RF instruction */
1188 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1189 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1190 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1191 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1192 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1193 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1194 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1196 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1197 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1199 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1200 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1201 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1202 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1203 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1205 /* 2RF instruction df(bit 16) = _w, _d */
1206 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1207 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1208 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1209 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1210 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1211 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1212 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1213 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1214 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1215 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1216 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1217 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1218 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1219 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1220 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1221 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1223 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1224 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1225 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1226 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1227 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1228 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1229 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1230 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1231 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1232 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1233 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1234 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1235 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1236 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1237 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1238 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1239 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1240 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1241 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1242 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1243 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1244 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1245 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1246 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1247 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1248 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1249 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1250 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1251 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1252 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1253 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1254 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1255 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1256 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1257 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1258 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1259 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1260 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1261 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1262 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1263 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1264 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1265 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1266 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1267 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1268 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1269 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1270 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1271 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1272 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1273 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1274 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1275 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1276 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1277 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1278 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1279 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1280 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1281 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1282 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1283 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1284 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1285 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1286 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1288 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1289 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1290 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1291 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1292 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1293 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1294 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1295 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1296 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1297 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1299 /* 3RF instruction _df(bit 21) = _w, _d */
1300 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1301 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1302 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1303 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1304 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1305 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1306 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1307 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1308 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1309 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1310 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1311 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1312 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1313 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1314 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1315 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1316 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1317 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1318 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1319 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1320 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1321 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1322 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1323 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1324 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1325 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1326 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1327 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1328 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1329 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1330 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1331 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1332 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1333 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1334 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1335 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1336 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1337 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1338 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1339 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1340 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1342 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1343 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1344 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1345 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1346 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1347 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1348 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1349 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1350 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1351 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1352 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1353 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1354 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1357 /* global register indices */
1358 static TCGv_env cpu_env
;
1359 static TCGv cpu_gpr
[32], cpu_PC
;
1360 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1361 static TCGv cpu_dspctrl
, btarget
, bcond
;
1362 static TCGv_i32 hflags
;
1363 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1364 static TCGv_i64 fpu_f64
[32];
1365 static TCGv_i64 msa_wr_d
[64];
1367 #include "exec/gen-icount.h"
/*
 * Wrappers for calling generated helpers that take trailing constant
 * integer arguments: each boxes the constant into a transient TCGv_i32,
 * passes it (after cpu_env and any TCG value args) to gen_helper_<name>,
 * and frees the temporary.
 * NOTE(review): the "} while (0)" closers were dropped by extraction and
 * are restored here to match upstream QEMU.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
1411 typedef struct DisasContext
{
1412 struct TranslationBlock
*tb
;
1413 target_ulong pc
, saved_pc
;
1415 int singlestep_enabled
;
1417 int32_t CP0_Config1
;
1418 /* Routine used to access memory */
1420 TCGMemOp default_tcg_memop_mask
;
1421 uint32_t hflags
, saved_hflags
;
1423 target_ulong btarget
;
1432 int CP0_LLAddr_shift
;
1439 BS_NONE
= 0, /* We go out of the TB without reaching a branch or an
1440 * exception condition */
1441 BS_STOP
= 1, /* We want to stop translation for any reason */
1442 BS_BRANCH
= 2, /* We reached a branch condition */
1443 BS_EXCP
= 3, /* We reached an exception condition */
/* ABI names of the 32 general-purpose registers, indexed by register number. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI accumulators (one per DSP accumulator). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO accumulators (one per DSP accumulator). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating-point registers, indexed by register number. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names of the 32 MSA vector registers, two 64-bit halves (d0/d1) each. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Debug logging helpers, active only when MIPS_DEBUG_DISAS is non-zero.
 * NOTE(review): do/while wrappers restored from upstream QEMU -- extraction
 * dropped them. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
1504 /* General purpose registers moves. */
1505 static inline void gen_load_gpr (TCGv t
, int reg
)
1508 tcg_gen_movi_tl(t
, 0);
1510 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1513 static inline void gen_store_gpr (TCGv t
, int reg
)
1516 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1519 /* Moves to/from shadow registers. */
1520 static inline void gen_load_srsgpr (int from
, int to
)
1522 TCGv t0
= tcg_temp_new();
1525 tcg_gen_movi_tl(t0
, 0);
1527 TCGv_i32 t2
= tcg_temp_new_i32();
1528 TCGv_ptr addr
= tcg_temp_new_ptr();
1530 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1531 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1532 tcg_gen_andi_i32(t2
, t2
, 0xf);
1533 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1534 tcg_gen_ext_i32_ptr(addr
, t2
);
1535 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1537 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1538 tcg_temp_free_ptr(addr
);
1539 tcg_temp_free_i32(t2
);
1541 gen_store_gpr(t0
, to
);
1545 static inline void gen_store_srsgpr (int from
, int to
)
1548 TCGv t0
= tcg_temp_new();
1549 TCGv_i32 t2
= tcg_temp_new_i32();
1550 TCGv_ptr addr
= tcg_temp_new_ptr();
1552 gen_load_gpr(t0
, from
);
1553 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1554 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1555 tcg_gen_andi_i32(t2
, t2
, 0xf);
1556 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1557 tcg_gen_ext_i32_ptr(addr
, t2
);
1558 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1560 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1561 tcg_temp_free_ptr(addr
);
1562 tcg_temp_free_i32(t2
);
1568 static inline void gen_save_pc(target_ulong pc
)
1570 tcg_gen_movi_tl(cpu_PC
, pc
);
1573 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1575 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1576 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
1577 gen_save_pc(ctx
->pc
);
1578 ctx
->saved_pc
= ctx
->pc
;
1580 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1581 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1582 ctx
->saved_hflags
= ctx
->hflags
;
1583 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1589 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1595 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1597 ctx
->saved_hflags
= ctx
->hflags
;
1598 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1604 ctx
->btarget
= env
->btarget
;
1609 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1611 TCGv_i32 texcp
= tcg_const_i32(excp
);
1612 TCGv_i32 terr
= tcg_const_i32(err
);
1613 save_cpu_state(ctx
, 1);
1614 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1615 tcg_temp_free_i32(terr
);
1616 tcg_temp_free_i32(texcp
);
1617 ctx
->bstate
= BS_EXCP
;
1620 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1622 gen_helper_0e0i(raise_exception
, excp
);
1625 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1627 generate_exception_err(ctx
, excp
, 0);
1630 /* Floating point register moves. */
1631 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1633 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1634 generate_exception(ctx
, EXCP_RI
);
1636 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1639 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1642 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1643 generate_exception(ctx
, EXCP_RI
);
1645 t64
= tcg_temp_new_i64();
1646 tcg_gen_extu_i32_i64(t64
, t
);
1647 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1648 tcg_temp_free_i64(t64
);
1651 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1653 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1654 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1656 gen_load_fpr32(ctx
, t
, reg
| 1);
1660 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1662 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1663 TCGv_i64 t64
= tcg_temp_new_i64();
1664 tcg_gen_extu_i32_i64(t64
, t
);
1665 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1666 tcg_temp_free_i64(t64
);
1668 gen_store_fpr32(ctx
, t
, reg
| 1);
1672 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1674 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1675 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1677 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1681 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1683 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1684 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1687 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1688 t0
= tcg_temp_new_i64();
1689 tcg_gen_shri_i64(t0
, t
, 32);
1690 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1691 tcg_temp_free_i64(t0
);
/* Map FP condition-code index cc to its FCSR bit position: cc 0 is bit 23,
 * cc 1..7 are bits 25..31.
 * NOTE(review): the function body was entirely dropped by extraction;
 * reconstructed from upstream QEMU -- verify against the original tree. */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
1703 /* Addresses computation */
1704 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1706 tcg_gen_add_tl(ret
, arg0
, arg1
);
1708 #if defined(TARGET_MIPS64)
1709 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1710 tcg_gen_ext32s_i64(ret
, ret
);
1715 /* Addresses computation (translation time) */
1716 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1719 target_long sum
= base
+ offset
;
1721 #if defined(TARGET_MIPS64)
1722 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1729 /* Sign-extract the low 32-bits to a target_long. */
1730 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1732 #if defined(TARGET_MIPS64)
1733 tcg_gen_ext32s_i64(ret
, arg
);
1735 tcg_gen_extrl_i64_i32(ret
, arg
);
1739 /* Sign-extract the high 32-bits to a target_long. */
1740 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1742 #if defined(TARGET_MIPS64)
1743 tcg_gen_sari_i64(ret
, arg
, 32);
1745 tcg_gen_extrh_i64_i32(ret
, arg
);
1749 static inline void check_cp0_enabled(DisasContext
*ctx
)
1751 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1752 generate_exception_err(ctx
, EXCP_CpU
, 0);
1755 static inline void check_cp1_enabled(DisasContext
*ctx
)
1757 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1758 generate_exception_err(ctx
, EXCP_CpU
, 1);
1761 /* Verify that the processor is running with COP1X instructions enabled.
1762 This is associated with the nabla symbol in the MIPS32 and MIPS64
1765 static inline void check_cop1x(DisasContext
*ctx
)
1767 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1768 generate_exception_end(ctx
, EXCP_RI
);
1771 /* Verify that the processor is running with 64-bit floating-point
1772 operations enabled. */
1774 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1776 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1777 generate_exception_end(ctx
, EXCP_RI
);
1781 * Verify if floating point register is valid; an operation is not defined
1782 * if bit 0 of any register specification is set and the FR bit in the
1783 * Status register equals zero, since the register numbers specify an
1784 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1785 * in the Status register equals one, both even and odd register numbers
1786 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1788 * Multiple 64 bit wide registers can be checked by calling
1789 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1791 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1793 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1794 generate_exception_end(ctx
, EXCP_RI
);
1797 /* Verify that the processor is running with DSP instructions enabled.
1798 This is enabled by CP0 Status register MX(24) bit.
1801 static inline void check_dsp(DisasContext
*ctx
)
1803 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1804 if (ctx
->insn_flags
& ASE_DSP
) {
1805 generate_exception_end(ctx
, EXCP_DSPDIS
);
1807 generate_exception_end(ctx
, EXCP_RI
);
1812 static inline void check_dspr2(DisasContext
*ctx
)
1814 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1815 if (ctx
->insn_flags
& ASE_DSP
) {
1816 generate_exception_end(ctx
, EXCP_DSPDIS
);
1818 generate_exception_end(ctx
, EXCP_RI
);
1823 /* This code generates a "reserved instruction" exception if the
1824 CPU does not support the instruction set corresponding to flags. */
1825 static inline void check_insn(DisasContext
*ctx
, int flags
)
1827 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1828 generate_exception_end(ctx
, EXCP_RI
);
1832 /* This code generates a "reserved instruction" exception if the
1833 CPU has corresponding flag set which indicates that the instruction
1834 has been removed. */
1835 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1837 if (unlikely(ctx
->insn_flags
& flags
)) {
1838 generate_exception_end(ctx
, EXCP_RI
);
1842 /* This code generates a "reserved instruction" exception if the
1843 CPU does not support 64-bit paired-single (PS) floating point data type */
1844 static inline void check_ps(DisasContext
*ctx
)
1846 if (unlikely(!ctx
->ps
)) {
1847 generate_exception(ctx
, EXCP_RI
);
1849 check_cp1_64bitmode(ctx
);
1852 #ifdef TARGET_MIPS64
1853 /* This code generates a "reserved instruction" exception if 64-bit
1854 instructions are not enabled. */
1855 static inline void check_mips_64(DisasContext
*ctx
)
1857 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
1858 generate_exception_end(ctx
, EXCP_RI
);
1862 #ifndef CONFIG_USER_ONLY
1863 static inline void check_mvh(DisasContext
*ctx
)
1865 if (unlikely(!ctx
->mvh
)) {
1866 generate_exception(ctx
, EXCP_RI
);
1871 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1872 calling interface for 32 and 64-bit FPRs. No sense in changing
1873 all callers for gen_load_fpr32 when we need the CTX parameter for
1875 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1876 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1877 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1878 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1879 int ft, int fs, int cc) \
1881 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1882 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1891 check_cp1_registers(ctx, fs | ft); \
1899 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1900 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1902 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1903 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1904 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1905 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1906 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1907 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1908 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1909 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1910 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1911 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1912 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1913 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1914 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1915 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1916 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1917 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1920 tcg_temp_free_i##bits (fp0); \
1921 tcg_temp_free_i##bits (fp1); \
1924 FOP_CONDS(, 0, d
, FMT_D
, 64)
1925 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1926 FOP_CONDS(, 0, s
, FMT_S
, 32)
1927 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1928 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1929 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1932 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1933 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1934 int ft, int fs, int fd) \
1936 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1937 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1938 if (ifmt == FMT_D) { \
1939 check_cp1_registers(ctx, fs | ft | fd); \
1941 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1942 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1945 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1948 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1951 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1954 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1957 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1960 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1963 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1966 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1969 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1972 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1975 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1978 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1981 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1984 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1987 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1990 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1993 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1996 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1999 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2002 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2005 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2008 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2014 tcg_temp_free_i ## bits (fp0); \
2015 tcg_temp_free_i ## bits (fp1); \
2018 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2019 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2021 #undef gen_ldcmp_fpr32
2022 #undef gen_ldcmp_fpr64
2024 /* load/store instructions. */
2025 #ifdef CONFIG_USER_ONLY
2026 #define OP_LD_ATOMIC(insn,fname) \
2027 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2029 TCGv t0 = tcg_temp_new(); \
2030 tcg_gen_mov_tl(t0, arg1); \
2031 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2032 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2033 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2034 tcg_temp_free(t0); \
2037 #define OP_LD_ATOMIC(insn,fname) \
2038 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2040 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
2043 OP_LD_ATOMIC(ll
,ld32s
);
2044 #if defined(TARGET_MIPS64)
2045 OP_LD_ATOMIC(lld
,ld64
);
2049 #ifdef CONFIG_USER_ONLY
2050 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2051 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2053 TCGv t0 = tcg_temp_new(); \
2054 TCGLabel *l1 = gen_new_label(); \
2055 TCGLabel *l2 = gen_new_label(); \
2057 tcg_gen_andi_tl(t0, arg2, almask); \
2058 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2059 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2060 generate_exception(ctx, EXCP_AdES); \
2061 gen_set_label(l1); \
2062 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2063 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2064 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2065 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2066 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2067 generate_exception_end(ctx, EXCP_SC); \
2068 gen_set_label(l2); \
2069 tcg_gen_movi_tl(t0, 0); \
2070 gen_store_gpr(t0, rt); \
2071 tcg_temp_free(t0); \
2074 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2075 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2077 TCGv t0 = tcg_temp_new(); \
2078 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
2079 gen_store_gpr(t0, rt); \
2080 tcg_temp_free(t0); \
2083 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2084 #if defined(TARGET_MIPS64)
2085 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2089 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2090 int base
, int16_t offset
)
2093 tcg_gen_movi_tl(addr
, offset
);
2094 } else if (offset
== 0) {
2095 gen_load_gpr(addr
, base
);
2097 tcg_gen_movi_tl(addr
, offset
);
2098 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2102 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2104 target_ulong pc
= ctx
->pc
;
2106 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2107 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2112 pc
&= ~(target_ulong
)3;
2117 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2118 int rt
, int base
, int16_t offset
)
2122 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2123 /* Loongson CPU uses a load to zero register for prefetch.
2124 We emulate it as a NOP. On other CPU we must perform the
2125 actual memory access. */
2129 t0
= tcg_temp_new();
2130 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2133 #if defined(TARGET_MIPS64)
2135 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2136 ctx
->default_tcg_memop_mask
);
2137 gen_store_gpr(t0
, rt
);
2140 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2141 ctx
->default_tcg_memop_mask
);
2142 gen_store_gpr(t0
, rt
);
2146 op_ld_lld(t0
, t0
, ctx
);
2147 gen_store_gpr(t0
, rt
);
2150 t1
= tcg_temp_new();
2151 /* Do a byte access to possibly trigger a page
2152 fault with the unaligned address. */
2153 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2154 tcg_gen_andi_tl(t1
, t0
, 7);
2155 #ifndef TARGET_WORDS_BIGENDIAN
2156 tcg_gen_xori_tl(t1
, t1
, 7);
2158 tcg_gen_shli_tl(t1
, t1
, 3);
2159 tcg_gen_andi_tl(t0
, t0
, ~7);
2160 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2161 tcg_gen_shl_tl(t0
, t0
, t1
);
2162 t2
= tcg_const_tl(-1);
2163 tcg_gen_shl_tl(t2
, t2
, t1
);
2164 gen_load_gpr(t1
, rt
);
2165 tcg_gen_andc_tl(t1
, t1
, t2
);
2167 tcg_gen_or_tl(t0
, t0
, t1
);
2169 gen_store_gpr(t0
, rt
);
2172 t1
= tcg_temp_new();
2173 /* Do a byte access to possibly trigger a page
2174 fault with the unaligned address. */
2175 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2176 tcg_gen_andi_tl(t1
, t0
, 7);
2177 #ifdef TARGET_WORDS_BIGENDIAN
2178 tcg_gen_xori_tl(t1
, t1
, 7);
2180 tcg_gen_shli_tl(t1
, t1
, 3);
2181 tcg_gen_andi_tl(t0
, t0
, ~7);
2182 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2183 tcg_gen_shr_tl(t0
, t0
, t1
);
2184 tcg_gen_xori_tl(t1
, t1
, 63);
2185 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2186 tcg_gen_shl_tl(t2
, t2
, t1
);
2187 gen_load_gpr(t1
, rt
);
2188 tcg_gen_and_tl(t1
, t1
, t2
);
2190 tcg_gen_or_tl(t0
, t0
, t1
);
2192 gen_store_gpr(t0
, rt
);
2195 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2196 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2198 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2199 gen_store_gpr(t0
, rt
);
2203 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2204 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2206 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2207 gen_store_gpr(t0
, rt
);
2210 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2211 ctx
->default_tcg_memop_mask
);
2212 gen_store_gpr(t0
, rt
);
2215 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2216 ctx
->default_tcg_memop_mask
);
2217 gen_store_gpr(t0
, rt
);
2220 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2221 ctx
->default_tcg_memop_mask
);
2222 gen_store_gpr(t0
, rt
);
2225 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2226 gen_store_gpr(t0
, rt
);
2229 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2230 gen_store_gpr(t0
, rt
);
2233 t1
= tcg_temp_new();
2234 /* Do a byte access to possibly trigger a page
2235 fault with the unaligned address. */
2236 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2237 tcg_gen_andi_tl(t1
, t0
, 3);
2238 #ifndef TARGET_WORDS_BIGENDIAN
2239 tcg_gen_xori_tl(t1
, t1
, 3);
2241 tcg_gen_shli_tl(t1
, t1
, 3);
2242 tcg_gen_andi_tl(t0
, t0
, ~3);
2243 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2244 tcg_gen_shl_tl(t0
, t0
, t1
);
2245 t2
= tcg_const_tl(-1);
2246 tcg_gen_shl_tl(t2
, t2
, t1
);
2247 gen_load_gpr(t1
, rt
);
2248 tcg_gen_andc_tl(t1
, t1
, t2
);
2250 tcg_gen_or_tl(t0
, t0
, t1
);
2252 tcg_gen_ext32s_tl(t0
, t0
);
2253 gen_store_gpr(t0
, rt
);
2256 t1
= tcg_temp_new();
2257 /* Do a byte access to possibly trigger a page
2258 fault with the unaligned address. */
2259 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2260 tcg_gen_andi_tl(t1
, t0
, 3);
2261 #ifdef TARGET_WORDS_BIGENDIAN
2262 tcg_gen_xori_tl(t1
, t1
, 3);
2264 tcg_gen_shli_tl(t1
, t1
, 3);
2265 tcg_gen_andi_tl(t0
, t0
, ~3);
2266 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2267 tcg_gen_shr_tl(t0
, t0
, t1
);
2268 tcg_gen_xori_tl(t1
, t1
, 31);
2269 t2
= tcg_const_tl(0xfffffffeull
);
2270 tcg_gen_shl_tl(t2
, t2
, t1
);
2271 gen_load_gpr(t1
, rt
);
2272 tcg_gen_and_tl(t1
, t1
, t2
);
2274 tcg_gen_or_tl(t0
, t0
, t1
);
2276 tcg_gen_ext32s_tl(t0
, t0
);
2277 gen_store_gpr(t0
, rt
);
2281 op_ld_ll(t0
, t0
, ctx
);
2282 gen_store_gpr(t0
, rt
);
2289 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2290 int base
, int16_t offset
)
2292 TCGv t0
= tcg_temp_new();
2293 TCGv t1
= tcg_temp_new();
2295 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2296 gen_load_gpr(t1
, rt
);
2298 #if defined(TARGET_MIPS64)
2300 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2301 ctx
->default_tcg_memop_mask
);
2304 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2307 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2311 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2312 ctx
->default_tcg_memop_mask
);
2315 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2316 ctx
->default_tcg_memop_mask
);
2319 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2322 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2325 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2333 /* Store conditional */
2334 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2335 int base
, int16_t offset
)
2339 #ifdef CONFIG_USER_ONLY
2340 t0
= tcg_temp_local_new();
2341 t1
= tcg_temp_local_new();
2343 t0
= tcg_temp_new();
2344 t1
= tcg_temp_new();
2346 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2347 gen_load_gpr(t1
, rt
);
2349 #if defined(TARGET_MIPS64)
2352 op_st_scd(t1
, t0
, rt
, ctx
);
2357 op_st_sc(t1
, t0
, rt
, ctx
);
2364 /* Load and store */
2365 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2366 int base
, int16_t offset
)
2368 TCGv t0
= tcg_temp_new();
2370 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2371 /* Don't do NOP if destination is zero: we must perform the actual
2376 TCGv_i32 fp0
= tcg_temp_new_i32();
2377 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2378 ctx
->default_tcg_memop_mask
);
2379 gen_store_fpr32(ctx
, fp0
, ft
);
2380 tcg_temp_free_i32(fp0
);
2385 TCGv_i32 fp0
= tcg_temp_new_i32();
2386 gen_load_fpr32(ctx
, fp0
, ft
);
2387 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2388 ctx
->default_tcg_memop_mask
);
2389 tcg_temp_free_i32(fp0
);
2394 TCGv_i64 fp0
= tcg_temp_new_i64();
2395 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2396 ctx
->default_tcg_memop_mask
);
2397 gen_store_fpr64(ctx
, fp0
, ft
);
2398 tcg_temp_free_i64(fp0
);
2403 TCGv_i64 fp0
= tcg_temp_new_i64();
2404 gen_load_fpr64(ctx
, fp0
, ft
);
2405 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2406 ctx
->default_tcg_memop_mask
);
2407 tcg_temp_free_i64(fp0
);
2411 MIPS_INVAL("flt_ldst");
2412 generate_exception_end(ctx
, EXCP_RI
);
2419 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2420 int rs
, int16_t imm
)
2422 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2423 check_cp1_enabled(ctx
);
2427 check_insn(ctx
, ISA_MIPS2
);
2430 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2433 generate_exception_err(ctx
, EXCP_CpU
, 1);
2437 /* Arithmetic with immediate operand */
2438 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2439 int rt
, int rs
, int16_t imm
)
2441 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2443 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2444 /* If no destination, treat it as a NOP.
2445 For addi, we must generate the overflow exception when needed. */
2451 TCGv t0
= tcg_temp_local_new();
2452 TCGv t1
= tcg_temp_new();
2453 TCGv t2
= tcg_temp_new();
2454 TCGLabel
*l1
= gen_new_label();
2456 gen_load_gpr(t1
, rs
);
2457 tcg_gen_addi_tl(t0
, t1
, uimm
);
2458 tcg_gen_ext32s_tl(t0
, t0
);
2460 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2461 tcg_gen_xori_tl(t2
, t0
, uimm
);
2462 tcg_gen_and_tl(t1
, t1
, t2
);
2464 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2466 /* operands of same sign, result different sign */
2467 generate_exception(ctx
, EXCP_OVERFLOW
);
2469 tcg_gen_ext32s_tl(t0
, t0
);
2470 gen_store_gpr(t0
, rt
);
2476 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2477 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2479 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2482 #if defined(TARGET_MIPS64)
2485 TCGv t0
= tcg_temp_local_new();
2486 TCGv t1
= tcg_temp_new();
2487 TCGv t2
= tcg_temp_new();
2488 TCGLabel
*l1
= gen_new_label();
2490 gen_load_gpr(t1
, rs
);
2491 tcg_gen_addi_tl(t0
, t1
, uimm
);
2493 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2494 tcg_gen_xori_tl(t2
, t0
, uimm
);
2495 tcg_gen_and_tl(t1
, t1
, t2
);
2497 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2499 /* operands of same sign, result different sign */
2500 generate_exception(ctx
, EXCP_OVERFLOW
);
2502 gen_store_gpr(t0
, rt
);
2508 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2510 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2517 /* Logic with immediate operand */
2518 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2519 int rt
, int rs
, int16_t imm
)
2524 /* If no destination, treat it as a NOP. */
2527 uimm
= (uint16_t)imm
;
2530 if (likely(rs
!= 0))
2531 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2533 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2537 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2539 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2542 if (likely(rs
!= 0))
2543 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2545 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2548 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2550 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2551 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2553 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2562 /* Set on less than with immediate operand */
2563 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2564 int rt
, int rs
, int16_t imm
)
2566 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2570 /* If no destination, treat it as a NOP. */
2573 t0
= tcg_temp_new();
2574 gen_load_gpr(t0
, rs
);
2577 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2580 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2586 /* Shifts with immediate operand */
2587 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2588 int rt
, int rs
, int16_t imm
)
2590 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2594 /* If no destination, treat it as a NOP. */
2598 t0
= tcg_temp_new();
2599 gen_load_gpr(t0
, rs
);
2602 tcg_gen_shli_tl(t0
, t0
, uimm
);
2603 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2606 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2610 tcg_gen_ext32u_tl(t0
, t0
);
2611 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2613 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2618 TCGv_i32 t1
= tcg_temp_new_i32();
2620 tcg_gen_trunc_tl_i32(t1
, t0
);
2621 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2622 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2623 tcg_temp_free_i32(t1
);
2625 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2628 #if defined(TARGET_MIPS64)
2630 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2633 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2636 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2640 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2642 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2646 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2649 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2652 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2655 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2663 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2664 int rd
, int rs
, int rt
)
2666 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2667 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2668 /* If no destination, treat it as a NOP.
2669 For add & sub, we must generate the overflow exception when needed. */
2676 TCGv t0
= tcg_temp_local_new();
2677 TCGv t1
= tcg_temp_new();
2678 TCGv t2
= tcg_temp_new();
2679 TCGLabel
*l1
= gen_new_label();
2681 gen_load_gpr(t1
, rs
);
2682 gen_load_gpr(t2
, rt
);
2683 tcg_gen_add_tl(t0
, t1
, t2
);
2684 tcg_gen_ext32s_tl(t0
, t0
);
2685 tcg_gen_xor_tl(t1
, t1
, t2
);
2686 tcg_gen_xor_tl(t2
, t0
, t2
);
2687 tcg_gen_andc_tl(t1
, t2
, t1
);
2689 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2691 /* operands of same sign, result different sign */
2692 generate_exception(ctx
, EXCP_OVERFLOW
);
2694 gen_store_gpr(t0
, rd
);
2699 if (rs
!= 0 && rt
!= 0) {
2700 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2701 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2702 } else if (rs
== 0 && rt
!= 0) {
2703 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2704 } else if (rs
!= 0 && rt
== 0) {
2705 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2707 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2712 TCGv t0
= tcg_temp_local_new();
2713 TCGv t1
= tcg_temp_new();
2714 TCGv t2
= tcg_temp_new();
2715 TCGLabel
*l1
= gen_new_label();
2717 gen_load_gpr(t1
, rs
);
2718 gen_load_gpr(t2
, rt
);
2719 tcg_gen_sub_tl(t0
, t1
, t2
);
2720 tcg_gen_ext32s_tl(t0
, t0
);
2721 tcg_gen_xor_tl(t2
, t1
, t2
);
2722 tcg_gen_xor_tl(t1
, t0
, t1
);
2723 tcg_gen_and_tl(t1
, t1
, t2
);
2725 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2727 /* operands of different sign, first operand and result different sign */
2728 generate_exception(ctx
, EXCP_OVERFLOW
);
2730 gen_store_gpr(t0
, rd
);
2735 if (rs
!= 0 && rt
!= 0) {
2736 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2737 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2738 } else if (rs
== 0 && rt
!= 0) {
2739 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2740 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2741 } else if (rs
!= 0 && rt
== 0) {
2742 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2744 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2747 #if defined(TARGET_MIPS64)
2750 TCGv t0
= tcg_temp_local_new();
2751 TCGv t1
= tcg_temp_new();
2752 TCGv t2
= tcg_temp_new();
2753 TCGLabel
*l1
= gen_new_label();
2755 gen_load_gpr(t1
, rs
);
2756 gen_load_gpr(t2
, rt
);
2757 tcg_gen_add_tl(t0
, t1
, t2
);
2758 tcg_gen_xor_tl(t1
, t1
, t2
);
2759 tcg_gen_xor_tl(t2
, t0
, t2
);
2760 tcg_gen_andc_tl(t1
, t2
, t1
);
2762 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2764 /* operands of same sign, result different sign */
2765 generate_exception(ctx
, EXCP_OVERFLOW
);
2767 gen_store_gpr(t0
, rd
);
2772 if (rs
!= 0 && rt
!= 0) {
2773 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2774 } else if (rs
== 0 && rt
!= 0) {
2775 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2776 } else if (rs
!= 0 && rt
== 0) {
2777 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2779 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2784 TCGv t0
= tcg_temp_local_new();
2785 TCGv t1
= tcg_temp_new();
2786 TCGv t2
= tcg_temp_new();
2787 TCGLabel
*l1
= gen_new_label();
2789 gen_load_gpr(t1
, rs
);
2790 gen_load_gpr(t2
, rt
);
2791 tcg_gen_sub_tl(t0
, t1
, t2
);
2792 tcg_gen_xor_tl(t2
, t1
, t2
);
2793 tcg_gen_xor_tl(t1
, t0
, t1
);
2794 tcg_gen_and_tl(t1
, t1
, t2
);
2796 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2798 /* operands of different sign, first operand and result different sign */
2799 generate_exception(ctx
, EXCP_OVERFLOW
);
2801 gen_store_gpr(t0
, rd
);
2806 if (rs
!= 0 && rt
!= 0) {
2807 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2808 } else if (rs
== 0 && rt
!= 0) {
2809 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2810 } else if (rs
!= 0 && rt
== 0) {
2811 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2813 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2818 if (likely(rs
!= 0 && rt
!= 0)) {
2819 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2820 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2822 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2828 /* Conditional move */
2829 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2830 int rd
, int rs
, int rt
)
2835 /* If no destination, treat it as a NOP. */
2839 t0
= tcg_temp_new();
2840 gen_load_gpr(t0
, rt
);
2841 t1
= tcg_const_tl(0);
2842 t2
= tcg_temp_new();
2843 gen_load_gpr(t2
, rs
);
2846 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2849 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2852 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2855 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2864 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2865 int rd
, int rs
, int rt
)
2868 /* If no destination, treat it as a NOP. */
2874 if (likely(rs
!= 0 && rt
!= 0)) {
2875 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2877 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2881 if (rs
!= 0 && rt
!= 0) {
2882 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2883 } else if (rs
== 0 && rt
!= 0) {
2884 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2885 } else if (rs
!= 0 && rt
== 0) {
2886 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2888 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2892 if (likely(rs
!= 0 && rt
!= 0)) {
2893 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2894 } else if (rs
== 0 && rt
!= 0) {
2895 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2896 } else if (rs
!= 0 && rt
== 0) {
2897 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2899 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2903 if (likely(rs
!= 0 && rt
!= 0)) {
2904 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2905 } else if (rs
== 0 && rt
!= 0) {
2906 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2907 } else if (rs
!= 0 && rt
== 0) {
2908 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2910 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2916 /* Set on lower than */
2917 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2918 int rd
, int rs
, int rt
)
2923 /* If no destination, treat it as a NOP. */
2927 t0
= tcg_temp_new();
2928 t1
= tcg_temp_new();
2929 gen_load_gpr(t0
, rs
);
2930 gen_load_gpr(t1
, rt
);
2933 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2936 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2944 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2945 int rd
, int rs
, int rt
)
2950 /* If no destination, treat it as a NOP.
2951 For add & sub, we must generate the overflow exception when needed. */
2955 t0
= tcg_temp_new();
2956 t1
= tcg_temp_new();
2957 gen_load_gpr(t0
, rs
);
2958 gen_load_gpr(t1
, rt
);
2961 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2962 tcg_gen_shl_tl(t0
, t1
, t0
);
2963 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2966 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2967 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2970 tcg_gen_ext32u_tl(t1
, t1
);
2971 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2972 tcg_gen_shr_tl(t0
, t1
, t0
);
2973 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2977 TCGv_i32 t2
= tcg_temp_new_i32();
2978 TCGv_i32 t3
= tcg_temp_new_i32();
2980 tcg_gen_trunc_tl_i32(t2
, t0
);
2981 tcg_gen_trunc_tl_i32(t3
, t1
);
2982 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2983 tcg_gen_rotr_i32(t2
, t3
, t2
);
2984 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2985 tcg_temp_free_i32(t2
);
2986 tcg_temp_free_i32(t3
);
2989 #if defined(TARGET_MIPS64)
2991 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2992 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2995 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2996 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2999 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3000 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3003 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3004 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3012 /* Arithmetic on HI/LO registers */
3013 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3015 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3026 #if defined(TARGET_MIPS64)
3028 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3032 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3036 #if defined(TARGET_MIPS64)
3038 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3042 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3047 #if defined(TARGET_MIPS64)
3049 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3053 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3056 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3061 #if defined(TARGET_MIPS64)
3063 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3067 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3070 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3076 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3079 TCGv t0
= tcg_const_tl(addr
);
3080 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3081 gen_store_gpr(t0
, reg
);
3085 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3091 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3094 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3095 addr
= addr_add(ctx
, pc
, offset
);
3096 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3100 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3101 addr
= addr_add(ctx
, pc
, offset
);
3102 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3104 #if defined(TARGET_MIPS64)
3107 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3108 addr
= addr_add(ctx
, pc
, offset
);
3109 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3113 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3116 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3117 addr
= addr_add(ctx
, pc
, offset
);
3118 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3123 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3124 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3125 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3128 #if defined(TARGET_MIPS64)
3129 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3130 case R6_OPC_LDPC
+ (1 << 16):
3131 case R6_OPC_LDPC
+ (2 << 16):
3132 case R6_OPC_LDPC
+ (3 << 16):
3134 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3135 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3136 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3140 MIPS_INVAL("OPC_PCREL");
3141 generate_exception_end(ctx
, EXCP_RI
);
3148 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3157 t0
= tcg_temp_new();
3158 t1
= tcg_temp_new();
3160 gen_load_gpr(t0
, rs
);
3161 gen_load_gpr(t1
, rt
);
3166 TCGv t2
= tcg_temp_new();
3167 TCGv t3
= tcg_temp_new();
3168 tcg_gen_ext32s_tl(t0
, t0
);
3169 tcg_gen_ext32s_tl(t1
, t1
);
3170 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3171 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3172 tcg_gen_and_tl(t2
, t2
, t3
);
3173 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3174 tcg_gen_or_tl(t2
, t2
, t3
);
3175 tcg_gen_movi_tl(t3
, 0);
3176 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3177 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3178 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3185 TCGv t2
= tcg_temp_new();
3186 TCGv t3
= tcg_temp_new();
3187 tcg_gen_ext32s_tl(t0
, t0
);
3188 tcg_gen_ext32s_tl(t1
, t1
);
3189 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3190 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3191 tcg_gen_and_tl(t2
, t2
, t3
);
3192 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3193 tcg_gen_or_tl(t2
, t2
, t3
);
3194 tcg_gen_movi_tl(t3
, 0);
3195 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3196 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3197 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3204 TCGv t2
= tcg_const_tl(0);
3205 TCGv t3
= tcg_const_tl(1);
3206 tcg_gen_ext32u_tl(t0
, t0
);
3207 tcg_gen_ext32u_tl(t1
, t1
);
3208 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3209 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3210 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3217 TCGv t2
= tcg_const_tl(0);
3218 TCGv t3
= tcg_const_tl(1);
3219 tcg_gen_ext32u_tl(t0
, t0
);
3220 tcg_gen_ext32u_tl(t1
, t1
);
3221 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3222 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3223 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3230 TCGv_i32 t2
= tcg_temp_new_i32();
3231 TCGv_i32 t3
= tcg_temp_new_i32();
3232 tcg_gen_trunc_tl_i32(t2
, t0
);
3233 tcg_gen_trunc_tl_i32(t3
, t1
);
3234 tcg_gen_mul_i32(t2
, t2
, t3
);
3235 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3236 tcg_temp_free_i32(t2
);
3237 tcg_temp_free_i32(t3
);
3242 TCGv_i32 t2
= tcg_temp_new_i32();
3243 TCGv_i32 t3
= tcg_temp_new_i32();
3244 tcg_gen_trunc_tl_i32(t2
, t0
);
3245 tcg_gen_trunc_tl_i32(t3
, t1
);
3246 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3247 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3248 tcg_temp_free_i32(t2
);
3249 tcg_temp_free_i32(t3
);
3254 TCGv_i32 t2
= tcg_temp_new_i32();
3255 TCGv_i32 t3
= tcg_temp_new_i32();
3256 tcg_gen_trunc_tl_i32(t2
, t0
);
3257 tcg_gen_trunc_tl_i32(t3
, t1
);
3258 tcg_gen_mul_i32(t2
, t2
, t3
);
3259 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3260 tcg_temp_free_i32(t2
);
3261 tcg_temp_free_i32(t3
);
3266 TCGv_i32 t2
= tcg_temp_new_i32();
3267 TCGv_i32 t3
= tcg_temp_new_i32();
3268 tcg_gen_trunc_tl_i32(t2
, t0
);
3269 tcg_gen_trunc_tl_i32(t3
, t1
);
3270 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3271 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3272 tcg_temp_free_i32(t2
);
3273 tcg_temp_free_i32(t3
);
3276 #if defined(TARGET_MIPS64)
3279 TCGv t2
= tcg_temp_new();
3280 TCGv t3
= tcg_temp_new();
3281 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3282 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3283 tcg_gen_and_tl(t2
, t2
, t3
);
3284 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3285 tcg_gen_or_tl(t2
, t2
, t3
);
3286 tcg_gen_movi_tl(t3
, 0);
3287 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3288 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3295 TCGv t2
= tcg_temp_new();
3296 TCGv t3
= tcg_temp_new();
3297 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3298 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3299 tcg_gen_and_tl(t2
, t2
, t3
);
3300 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3301 tcg_gen_or_tl(t2
, t2
, t3
);
3302 tcg_gen_movi_tl(t3
, 0);
3303 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3304 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3311 TCGv t2
= tcg_const_tl(0);
3312 TCGv t3
= tcg_const_tl(1);
3313 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3314 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3321 TCGv t2
= tcg_const_tl(0);
3322 TCGv t3
= tcg_const_tl(1);
3323 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3324 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3330 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3334 TCGv t2
= tcg_temp_new();
3335 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3340 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3344 TCGv t2
= tcg_temp_new();
3345 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3351 MIPS_INVAL("r6 mul/div");
3352 generate_exception_end(ctx
, EXCP_RI
);
3360 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3361 int acc
, int rs
, int rt
)
3365 t0
= tcg_temp_new();
3366 t1
= tcg_temp_new();
3368 gen_load_gpr(t0
, rs
);
3369 gen_load_gpr(t1
, rt
);
3378 TCGv t2
= tcg_temp_new();
3379 TCGv t3
= tcg_temp_new();
3380 tcg_gen_ext32s_tl(t0
, t0
);
3381 tcg_gen_ext32s_tl(t1
, t1
);
3382 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3383 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3384 tcg_gen_and_tl(t2
, t2
, t3
);
3385 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3386 tcg_gen_or_tl(t2
, t2
, t3
);
3387 tcg_gen_movi_tl(t3
, 0);
3388 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3389 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3390 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3391 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3392 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3399 TCGv t2
= tcg_const_tl(0);
3400 TCGv t3
= tcg_const_tl(1);
3401 tcg_gen_ext32u_tl(t0
, t0
);
3402 tcg_gen_ext32u_tl(t1
, t1
);
3403 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3404 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3405 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3406 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3407 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3414 TCGv_i32 t2
= tcg_temp_new_i32();
3415 TCGv_i32 t3
= tcg_temp_new_i32();
3416 tcg_gen_trunc_tl_i32(t2
, t0
);
3417 tcg_gen_trunc_tl_i32(t3
, t1
);
3418 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3419 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3420 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3421 tcg_temp_free_i32(t2
);
3422 tcg_temp_free_i32(t3
);
3427 TCGv_i32 t2
= tcg_temp_new_i32();
3428 TCGv_i32 t3
= tcg_temp_new_i32();
3429 tcg_gen_trunc_tl_i32(t2
, t0
);
3430 tcg_gen_trunc_tl_i32(t3
, t1
);
3431 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3432 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3433 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3434 tcg_temp_free_i32(t2
);
3435 tcg_temp_free_i32(t3
);
3438 #if defined(TARGET_MIPS64)
3441 TCGv t2
= tcg_temp_new();
3442 TCGv t3
= tcg_temp_new();
3443 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3444 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3445 tcg_gen_and_tl(t2
, t2
, t3
);
3446 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3447 tcg_gen_or_tl(t2
, t2
, t3
);
3448 tcg_gen_movi_tl(t3
, 0);
3449 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3450 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3451 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3458 TCGv t2
= tcg_const_tl(0);
3459 TCGv t3
= tcg_const_tl(1);
3460 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3461 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3462 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3468 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3471 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3476 TCGv_i64 t2
= tcg_temp_new_i64();
3477 TCGv_i64 t3
= tcg_temp_new_i64();
3479 tcg_gen_ext_tl_i64(t2
, t0
);
3480 tcg_gen_ext_tl_i64(t3
, t1
);
3481 tcg_gen_mul_i64(t2
, t2
, t3
);
3482 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3483 tcg_gen_add_i64(t2
, t2
, t3
);
3484 tcg_temp_free_i64(t3
);
3485 gen_move_low32(cpu_LO
[acc
], t2
);
3486 gen_move_high32(cpu_HI
[acc
], t2
);
3487 tcg_temp_free_i64(t2
);
3492 TCGv_i64 t2
= tcg_temp_new_i64();
3493 TCGv_i64 t3
= tcg_temp_new_i64();
3495 tcg_gen_ext32u_tl(t0
, t0
);
3496 tcg_gen_ext32u_tl(t1
, t1
);
3497 tcg_gen_extu_tl_i64(t2
, t0
);
3498 tcg_gen_extu_tl_i64(t3
, t1
);
3499 tcg_gen_mul_i64(t2
, t2
, t3
);
3500 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3501 tcg_gen_add_i64(t2
, t2
, t3
);
3502 tcg_temp_free_i64(t3
);
3503 gen_move_low32(cpu_LO
[acc
], t2
);
3504 gen_move_high32(cpu_HI
[acc
], t2
);
3505 tcg_temp_free_i64(t2
);
3510 TCGv_i64 t2
= tcg_temp_new_i64();
3511 TCGv_i64 t3
= tcg_temp_new_i64();
3513 tcg_gen_ext_tl_i64(t2
, t0
);
3514 tcg_gen_ext_tl_i64(t3
, t1
);
3515 tcg_gen_mul_i64(t2
, t2
, t3
);
3516 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3517 tcg_gen_sub_i64(t2
, t3
, t2
);
3518 tcg_temp_free_i64(t3
);
3519 gen_move_low32(cpu_LO
[acc
], t2
);
3520 gen_move_high32(cpu_HI
[acc
], t2
);
3521 tcg_temp_free_i64(t2
);
3526 TCGv_i64 t2
= tcg_temp_new_i64();
3527 TCGv_i64 t3
= tcg_temp_new_i64();
3529 tcg_gen_ext32u_tl(t0
, t0
);
3530 tcg_gen_ext32u_tl(t1
, t1
);
3531 tcg_gen_extu_tl_i64(t2
, t0
);
3532 tcg_gen_extu_tl_i64(t3
, t1
);
3533 tcg_gen_mul_i64(t2
, t2
, t3
);
3534 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3535 tcg_gen_sub_i64(t2
, t3
, t2
);
3536 tcg_temp_free_i64(t3
);
3537 gen_move_low32(cpu_LO
[acc
], t2
);
3538 gen_move_high32(cpu_HI
[acc
], t2
);
3539 tcg_temp_free_i64(t2
);
3543 MIPS_INVAL("mul/div");
3544 generate_exception_end(ctx
, EXCP_RI
);
3552 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3553 int rd
, int rs
, int rt
)
3555 TCGv t0
= tcg_temp_new();
3556 TCGv t1
= tcg_temp_new();
3558 gen_load_gpr(t0
, rs
);
3559 gen_load_gpr(t1
, rt
);
3562 case OPC_VR54XX_MULS
:
3563 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3565 case OPC_VR54XX_MULSU
:
3566 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3568 case OPC_VR54XX_MACC
:
3569 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3571 case OPC_VR54XX_MACCU
:
3572 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3574 case OPC_VR54XX_MSAC
:
3575 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3577 case OPC_VR54XX_MSACU
:
3578 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3580 case OPC_VR54XX_MULHI
:
3581 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3583 case OPC_VR54XX_MULHIU
:
3584 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3586 case OPC_VR54XX_MULSHI
:
3587 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3589 case OPC_VR54XX_MULSHIU
:
3590 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3592 case OPC_VR54XX_MACCHI
:
3593 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3595 case OPC_VR54XX_MACCHIU
:
3596 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3598 case OPC_VR54XX_MSACHI
:
3599 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3601 case OPC_VR54XX_MSACHIU
:
3602 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3605 MIPS_INVAL("mul vr54xx");
3606 generate_exception_end(ctx
, EXCP_RI
);
3609 gen_store_gpr(t0
, rd
);
3616 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3625 t0
= tcg_temp_new();
3626 gen_load_gpr(t0
, rs
);
3630 gen_helper_clo(cpu_gpr
[rd
], t0
);
3634 gen_helper_clz(cpu_gpr
[rd
], t0
);
3636 #if defined(TARGET_MIPS64)
3639 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3643 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3650 /* Godson integer instructions */
3651 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3652 int rd
, int rs
, int rt
)
3664 case OPC_MULTU_G_2E
:
3665 case OPC_MULTU_G_2F
:
3666 #if defined(TARGET_MIPS64)
3667 case OPC_DMULT_G_2E
:
3668 case OPC_DMULT_G_2F
:
3669 case OPC_DMULTU_G_2E
:
3670 case OPC_DMULTU_G_2F
:
3672 t0
= tcg_temp_new();
3673 t1
= tcg_temp_new();
3676 t0
= tcg_temp_local_new();
3677 t1
= tcg_temp_local_new();
3681 gen_load_gpr(t0
, rs
);
3682 gen_load_gpr(t1
, rt
);
3687 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3688 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3690 case OPC_MULTU_G_2E
:
3691 case OPC_MULTU_G_2F
:
3692 tcg_gen_ext32u_tl(t0
, t0
);
3693 tcg_gen_ext32u_tl(t1
, t1
);
3694 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3695 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3700 TCGLabel
*l1
= gen_new_label();
3701 TCGLabel
*l2
= gen_new_label();
3702 TCGLabel
*l3
= gen_new_label();
3703 tcg_gen_ext32s_tl(t0
, t0
);
3704 tcg_gen_ext32s_tl(t1
, t1
);
3705 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3706 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3709 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3710 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3711 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3714 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3715 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3722 TCGLabel
*l1
= gen_new_label();
3723 TCGLabel
*l2
= gen_new_label();
3724 tcg_gen_ext32u_tl(t0
, t0
);
3725 tcg_gen_ext32u_tl(t1
, t1
);
3726 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3727 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3730 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3731 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3738 TCGLabel
*l1
= gen_new_label();
3739 TCGLabel
*l2
= gen_new_label();
3740 TCGLabel
*l3
= gen_new_label();
3741 tcg_gen_ext32u_tl(t0
, t0
);
3742 tcg_gen_ext32u_tl(t1
, t1
);
3743 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3744 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3745 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3747 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3750 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3751 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3758 TCGLabel
*l1
= gen_new_label();
3759 TCGLabel
*l2
= gen_new_label();
3760 tcg_gen_ext32u_tl(t0
, t0
);
3761 tcg_gen_ext32u_tl(t1
, t1
);
3762 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3763 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3766 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3767 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3771 #if defined(TARGET_MIPS64)
3772 case OPC_DMULT_G_2E
:
3773 case OPC_DMULT_G_2F
:
3774 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3776 case OPC_DMULTU_G_2E
:
3777 case OPC_DMULTU_G_2F
:
3778 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3783 TCGLabel
*l1
= gen_new_label();
3784 TCGLabel
*l2
= gen_new_label();
3785 TCGLabel
*l3
= gen_new_label();
3786 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3787 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3790 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3791 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3792 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3795 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3799 case OPC_DDIVU_G_2E
:
3800 case OPC_DDIVU_G_2F
:
3802 TCGLabel
*l1
= gen_new_label();
3803 TCGLabel
*l2
= gen_new_label();
3804 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3805 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3808 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3815 TCGLabel
*l1
= gen_new_label();
3816 TCGLabel
*l2
= gen_new_label();
3817 TCGLabel
*l3
= gen_new_label();
3818 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3819 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3820 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3822 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3825 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3829 case OPC_DMODU_G_2E
:
3830 case OPC_DMODU_G_2F
:
3832 TCGLabel
*l1
= gen_new_label();
3833 TCGLabel
*l2
= gen_new_label();
3834 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3835 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3838 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3849 /* Loongson multimedia instructions */
3850 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3852 uint32_t opc
, shift_max
;
3855 opc
= MASK_LMI(ctx
->opcode
);
3861 t0
= tcg_temp_local_new_i64();
3862 t1
= tcg_temp_local_new_i64();
3865 t0
= tcg_temp_new_i64();
3866 t1
= tcg_temp_new_i64();
3870 gen_load_fpr64(ctx
, t0
, rs
);
3871 gen_load_fpr64(ctx
, t1
, rt
);
3873 #define LMI_HELPER(UP, LO) \
3874 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3875 #define LMI_HELPER_1(UP, LO) \
3876 case OPC_##UP: gen_helper_##LO(t0, t0); break
3877 #define LMI_DIRECT(UP, LO, OP) \
3878 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3881 LMI_HELPER(PADDSH
, paddsh
);
3882 LMI_HELPER(PADDUSH
, paddush
);
3883 LMI_HELPER(PADDH
, paddh
);
3884 LMI_HELPER(PADDW
, paddw
);
3885 LMI_HELPER(PADDSB
, paddsb
);
3886 LMI_HELPER(PADDUSB
, paddusb
);
3887 LMI_HELPER(PADDB
, paddb
);
3889 LMI_HELPER(PSUBSH
, psubsh
);
3890 LMI_HELPER(PSUBUSH
, psubush
);
3891 LMI_HELPER(PSUBH
, psubh
);
3892 LMI_HELPER(PSUBW
, psubw
);
3893 LMI_HELPER(PSUBSB
, psubsb
);
3894 LMI_HELPER(PSUBUSB
, psubusb
);
3895 LMI_HELPER(PSUBB
, psubb
);
3897 LMI_HELPER(PSHUFH
, pshufh
);
3898 LMI_HELPER(PACKSSWH
, packsswh
);
3899 LMI_HELPER(PACKSSHB
, packsshb
);
3900 LMI_HELPER(PACKUSHB
, packushb
);
3902 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3903 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3904 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3905 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3906 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3907 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3909 LMI_HELPER(PAVGH
, pavgh
);
3910 LMI_HELPER(PAVGB
, pavgb
);
3911 LMI_HELPER(PMAXSH
, pmaxsh
);
3912 LMI_HELPER(PMINSH
, pminsh
);
3913 LMI_HELPER(PMAXUB
, pmaxub
);
3914 LMI_HELPER(PMINUB
, pminub
);
3916 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3917 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3918 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3919 LMI_HELPER(PCMPGTH
, pcmpgth
);
3920 LMI_HELPER(PCMPEQB
, pcmpeqb
);
3921 LMI_HELPER(PCMPGTB
, pcmpgtb
);
3923 LMI_HELPER(PSLLW
, psllw
);
3924 LMI_HELPER(PSLLH
, psllh
);
3925 LMI_HELPER(PSRLW
, psrlw
);
3926 LMI_HELPER(PSRLH
, psrlh
);
3927 LMI_HELPER(PSRAW
, psraw
);
3928 LMI_HELPER(PSRAH
, psrah
);
3930 LMI_HELPER(PMULLH
, pmullh
);
3931 LMI_HELPER(PMULHH
, pmulhh
);
3932 LMI_HELPER(PMULHUH
, pmulhuh
);
3933 LMI_HELPER(PMADDHW
, pmaddhw
);
3935 LMI_HELPER(PASUBUB
, pasubub
);
3936 LMI_HELPER_1(BIADD
, biadd
);
3937 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
3939 LMI_DIRECT(PADDD
, paddd
, add
);
3940 LMI_DIRECT(PSUBD
, psubd
, sub
);
3941 LMI_DIRECT(XOR_CP2
, xor, xor);
3942 LMI_DIRECT(NOR_CP2
, nor
, nor
);
3943 LMI_DIRECT(AND_CP2
, and, and);
3944 LMI_DIRECT(PANDN
, pandn
, andc
);
3945 LMI_DIRECT(OR
, or, or);
3948 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
3951 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
3954 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
3957 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
3961 tcg_gen_andi_i64(t1
, t1
, 3);
3962 tcg_gen_shli_i64(t1
, t1
, 4);
3963 tcg_gen_shr_i64(t0
, t0
, t1
);
3964 tcg_gen_ext16u_i64(t0
, t0
);
3968 tcg_gen_add_i64(t0
, t0
, t1
);
3969 tcg_gen_ext32s_i64(t0
, t0
);
3972 tcg_gen_sub_i64(t0
, t0
, t1
);
3973 tcg_gen_ext32s_i64(t0
, t0
);
3995 /* Make sure shift count isn't TCG undefined behaviour. */
3996 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4001 tcg_gen_shl_i64(t0
, t0
, t1
);
4005 /* Since SRA is UndefinedResult without sign-extended inputs,
4006 we can treat SRA and DSRA the same. */
4007 tcg_gen_sar_i64(t0
, t0
, t1
);
4010 /* We want to shift in zeros for SRL; zero-extend first. */
4011 tcg_gen_ext32u_i64(t0
, t0
);
4014 tcg_gen_shr_i64(t0
, t0
, t1
);
4018 if (shift_max
== 32) {
4019 tcg_gen_ext32s_i64(t0
, t0
);
4022 /* Shifts larger than MAX produce zero. */
4023 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4024 tcg_gen_neg_i64(t1
, t1
);
4025 tcg_gen_and_i64(t0
, t0
, t1
);
4031 TCGv_i64 t2
= tcg_temp_new_i64();
4032 TCGLabel
*lab
= gen_new_label();
4034 tcg_gen_mov_i64(t2
, t0
);
4035 tcg_gen_add_i64(t0
, t1
, t2
);
4036 if (opc
== OPC_ADD_CP2
) {
4037 tcg_gen_ext32s_i64(t0
, t0
);
4039 tcg_gen_xor_i64(t1
, t1
, t2
);
4040 tcg_gen_xor_i64(t2
, t2
, t0
);
4041 tcg_gen_andc_i64(t1
, t2
, t1
);
4042 tcg_temp_free_i64(t2
);
4043 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4044 generate_exception(ctx
, EXCP_OVERFLOW
);
4052 TCGv_i64 t2
= tcg_temp_new_i64();
4053 TCGLabel
*lab
= gen_new_label();
4055 tcg_gen_mov_i64(t2
, t0
);
4056 tcg_gen_sub_i64(t0
, t1
, t2
);
4057 if (opc
== OPC_SUB_CP2
) {
4058 tcg_gen_ext32s_i64(t0
, t0
);
4060 tcg_gen_xor_i64(t1
, t1
, t2
);
4061 tcg_gen_xor_i64(t2
, t2
, t0
);
4062 tcg_gen_and_i64(t1
, t1
, t2
);
4063 tcg_temp_free_i64(t2
);
4064 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4065 generate_exception(ctx
, EXCP_OVERFLOW
);
4071 tcg_gen_ext32u_i64(t0
, t0
);
4072 tcg_gen_ext32u_i64(t1
, t1
);
4073 tcg_gen_mul_i64(t0
, t0
, t1
);
4082 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4083 FD field is the CC field? */
4085 MIPS_INVAL("loongson_cp2");
4086 generate_exception_end(ctx
, EXCP_RI
);
4093 gen_store_fpr64(ctx
, t0
, rd
);
4095 tcg_temp_free_i64(t0
);
4096 tcg_temp_free_i64(t1
);
4100 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4101 int rs
, int rt
, int16_t imm
)
4104 TCGv t0
= tcg_temp_new();
4105 TCGv t1
= tcg_temp_new();
4108 /* Load needed operands */
4116 /* Compare two registers */
4118 gen_load_gpr(t0
, rs
);
4119 gen_load_gpr(t1
, rt
);
4129 /* Compare register to immediate */
4130 if (rs
!= 0 || imm
!= 0) {
4131 gen_load_gpr(t0
, rs
);
4132 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4139 case OPC_TEQ
: /* rs == rs */
4140 case OPC_TEQI
: /* r0 == 0 */
4141 case OPC_TGE
: /* rs >= rs */
4142 case OPC_TGEI
: /* r0 >= 0 */
4143 case OPC_TGEU
: /* rs >= rs unsigned */
4144 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4146 generate_exception_end(ctx
, EXCP_TRAP
);
4148 case OPC_TLT
: /* rs < rs */
4149 case OPC_TLTI
: /* r0 < 0 */
4150 case OPC_TLTU
: /* rs < rs unsigned */
4151 case OPC_TLTIU
: /* r0 < 0 unsigned */
4152 case OPC_TNE
: /* rs != rs */
4153 case OPC_TNEI
: /* r0 != 0 */
4154 /* Never trap: treat as NOP. */
4158 TCGLabel
*l1
= gen_new_label();
4163 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4167 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4171 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4175 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4179 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4183 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4186 generate_exception(ctx
, EXCP_TRAP
);
4193 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4195 TranslationBlock
*tb
;
4197 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
4198 likely(!ctx
->singlestep_enabled
)) {
4201 tcg_gen_exit_tb((uintptr_t)tb
+ n
);
4204 if (ctx
->singlestep_enabled
) {
4205 save_cpu_state(ctx
, 0);
4206 gen_helper_raise_exception_debug(cpu_env
);
4212 /* Branches (before delay slot) */
4213 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4215 int rs
, int rt
, int32_t offset
,
4218 target_ulong btgt
= -1;
4220 int bcond_compute
= 0;
4221 TCGv t0
= tcg_temp_new();
4222 TCGv t1
= tcg_temp_new();
4224 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4225 #ifdef MIPS_DEBUG_DISAS
4226 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4227 TARGET_FMT_lx
"\n", ctx
->pc
);
4229 generate_exception_end(ctx
, EXCP_RI
);
4233 /* Load needed operands */
4239 /* Compare two registers */
4241 gen_load_gpr(t0
, rs
);
4242 gen_load_gpr(t1
, rt
);
4245 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4259 /* Compare to zero */
4261 gen_load_gpr(t0
, rs
);
4264 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4267 #if defined(TARGET_MIPS64)
4269 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4271 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4274 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4279 /* Jump to immediate */
4280 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4284 /* Jump to register */
4285 if (offset
!= 0 && offset
!= 16) {
4286 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4287 others are reserved. */
4288 MIPS_INVAL("jump hint");
4289 generate_exception_end(ctx
, EXCP_RI
);
4292 gen_load_gpr(btarget
, rs
);
4295 MIPS_INVAL("branch/jump");
4296 generate_exception_end(ctx
, EXCP_RI
);
4299 if (bcond_compute
== 0) {
4300 /* No condition to be computed */
4302 case OPC_BEQ
: /* rx == rx */
4303 case OPC_BEQL
: /* rx == rx likely */
4304 case OPC_BGEZ
: /* 0 >= 0 */
4305 case OPC_BGEZL
: /* 0 >= 0 likely */
4306 case OPC_BLEZ
: /* 0 <= 0 */
4307 case OPC_BLEZL
: /* 0 <= 0 likely */
4309 ctx
->hflags
|= MIPS_HFLAG_B
;
4311 case OPC_BGEZAL
: /* 0 >= 0 */
4312 case OPC_BGEZALL
: /* 0 >= 0 likely */
4313 /* Always take and link */
4315 ctx
->hflags
|= MIPS_HFLAG_B
;
4317 case OPC_BNE
: /* rx != rx */
4318 case OPC_BGTZ
: /* 0 > 0 */
4319 case OPC_BLTZ
: /* 0 < 0 */
4322 case OPC_BLTZAL
: /* 0 < 0 */
4323 /* Handle as an unconditional branch to get correct delay
4326 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4327 ctx
->hflags
|= MIPS_HFLAG_B
;
4329 case OPC_BLTZALL
: /* 0 < 0 likely */
4330 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4331 /* Skip the instruction in the delay slot */
4334 case OPC_BNEL
: /* rx != rx likely */
4335 case OPC_BGTZL
: /* 0 > 0 likely */
4336 case OPC_BLTZL
: /* 0 < 0 likely */
4337 /* Skip the instruction in the delay slot */
4341 ctx
->hflags
|= MIPS_HFLAG_B
;
4344 ctx
->hflags
|= MIPS_HFLAG_BX
;
4348 ctx
->hflags
|= MIPS_HFLAG_B
;
4351 ctx
->hflags
|= MIPS_HFLAG_BR
;
4355 ctx
->hflags
|= MIPS_HFLAG_BR
;
4358 MIPS_INVAL("branch/jump");
4359 generate_exception_end(ctx
, EXCP_RI
);
4365 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4368 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4371 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4374 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4377 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4380 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4383 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4387 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4391 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4394 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4397 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4400 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4403 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4406 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4409 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4411 #if defined(TARGET_MIPS64)
4413 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4417 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4420 ctx
->hflags
|= MIPS_HFLAG_BC
;
4423 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4426 ctx
->hflags
|= MIPS_HFLAG_BL
;
4429 MIPS_INVAL("conditional branch/jump");
4430 generate_exception_end(ctx
, EXCP_RI
);
4435 ctx
->btarget
= btgt
;
4437 switch (delayslot_size
) {
4439 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4442 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4447 int post_delay
= insn_bytes
+ delayslot_size
;
4448 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4450 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4454 if (insn_bytes
== 2)
4455 ctx
->hflags
|= MIPS_HFLAG_B16
;
4460 /* special3 bitfield operations */
4461 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4462 int rs
, int lsb
, int msb
)
4464 TCGv t0
= tcg_temp_new();
4465 TCGv t1
= tcg_temp_new();
4467 gen_load_gpr(t1
, rs
);
4470 if (lsb
+ msb
> 31) {
4473 tcg_gen_shri_tl(t0
, t1
, lsb
);
4475 tcg_gen_andi_tl(t0
, t0
, (1U << (msb
+ 1)) - 1);
4477 tcg_gen_ext32s_tl(t0
, t0
);
4480 #if defined(TARGET_MIPS64)
4489 if (lsb
+ msb
> 63) {
4492 tcg_gen_shri_tl(t0
, t1
, lsb
);
4494 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
4502 gen_load_gpr(t0
, rt
);
4503 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4504 tcg_gen_ext32s_tl(t0
, t0
);
4506 #if defined(TARGET_MIPS64)
4517 gen_load_gpr(t0
, rt
);
4518 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4523 MIPS_INVAL("bitops");
4524 generate_exception_end(ctx
, EXCP_RI
);
4529 gen_store_gpr(t0
, rt
);
4534 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4539 /* If no destination, treat it as a NOP. */
4543 t0
= tcg_temp_new();
4544 gen_load_gpr(t0
, rt
);
4548 TCGv t1
= tcg_temp_new();
4550 tcg_gen_shri_tl(t1
, t0
, 8);
4551 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
4552 tcg_gen_shli_tl(t0
, t0
, 8);
4553 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
4554 tcg_gen_or_tl(t0
, t0
, t1
);
4556 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4560 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4563 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4565 #if defined(TARGET_MIPS64)
4568 TCGv t1
= tcg_temp_new();
4570 tcg_gen_shri_tl(t1
, t0
, 8);
4571 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
4572 tcg_gen_shli_tl(t0
, t0
, 8);
4573 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
4574 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4580 TCGv t1
= tcg_temp_new();
4582 tcg_gen_shri_tl(t1
, t0
, 16);
4583 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
4584 tcg_gen_shli_tl(t0
, t0
, 16);
4585 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
4586 tcg_gen_or_tl(t0
, t0
, t1
);
4587 tcg_gen_shri_tl(t1
, t0
, 32);
4588 tcg_gen_shli_tl(t0
, t0
, 32);
4589 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4595 MIPS_INVAL("bsfhl");
4596 generate_exception_end(ctx
, EXCP_RI
);
4603 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4612 t0
= tcg_temp_new();
4613 t1
= tcg_temp_new();
4614 gen_load_gpr(t0
, rs
);
4615 gen_load_gpr(t1
, rt
);
4616 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4617 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4618 if (opc
== OPC_LSA
) {
4619 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4628 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4636 t0
= tcg_temp_new();
4637 gen_load_gpr(t0
, rt
);
4641 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4643 #if defined(TARGET_MIPS64)
4645 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4650 TCGv t1
= tcg_temp_new();
4651 gen_load_gpr(t1
, rs
);
4655 TCGv_i64 t2
= tcg_temp_new_i64();
4656 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4657 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4658 gen_move_low32(cpu_gpr
[rd
], t2
);
4659 tcg_temp_free_i64(t2
);
4662 #if defined(TARGET_MIPS64)
4664 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4665 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4666 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4676 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4683 t0
= tcg_temp_new();
4684 gen_load_gpr(t0
, rt
);
4687 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4689 #if defined(TARGET_MIPS64)
4691 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4698 #ifndef CONFIG_USER_ONLY
4699 /* CP0 (MMU and control) */
4700 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4702 TCGv_i64 t0
= tcg_temp_new_i64();
4703 TCGv_i64 t1
= tcg_temp_new_i64();
4705 tcg_gen_ext_tl_i64(t0
, arg
);
4706 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4707 #if defined(TARGET_MIPS64)
4708 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4710 tcg_gen_concat32_i64(t1
, t1
, t0
);
4712 tcg_gen_st_i64(t1
, cpu_env
, off
);
4713 tcg_temp_free_i64(t1
);
4714 tcg_temp_free_i64(t0
);
4717 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4719 TCGv_i64 t0
= tcg_temp_new_i64();
4720 TCGv_i64 t1
= tcg_temp_new_i64();
4722 tcg_gen_ext_tl_i64(t0
, arg
);
4723 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4724 tcg_gen_concat32_i64(t1
, t1
, t0
);
4725 tcg_gen_st_i64(t1
, cpu_env
, off
);
4726 tcg_temp_free_i64(t1
);
4727 tcg_temp_free_i64(t0
);
4730 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4732 TCGv_i64 t0
= tcg_temp_new_i64();
4734 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4735 #if defined(TARGET_MIPS64)
4736 tcg_gen_shri_i64(t0
, t0
, 30);
4738 tcg_gen_shri_i64(t0
, t0
, 32);
4740 gen_move_low32(arg
, t0
);
4741 tcg_temp_free_i64(t0
);
4744 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4746 TCGv_i64 t0
= tcg_temp_new_i64();
4748 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4749 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4750 gen_move_low32(arg
, t0
);
4751 tcg_temp_free_i64(t0
);
4754 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4756 TCGv_i32 t0
= tcg_temp_new_i32();
4758 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4759 tcg_gen_ext_i32_tl(arg
, t0
);
4760 tcg_temp_free_i32(t0
);
4763 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4765 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4766 tcg_gen_ext32s_tl(arg
, arg
);
4769 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4771 TCGv_i32 t0
= tcg_temp_new_i32();
4773 tcg_gen_trunc_tl_i32(t0
, arg
);
4774 tcg_gen_st_i32(t0
, cpu_env
, off
);
4775 tcg_temp_free_i32(t0
);
4778 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4780 const char *rn
= "invalid";
4782 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4783 goto mfhc0_read_zero
;
4790 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4794 goto mfhc0_read_zero
;
4800 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4804 goto mfhc0_read_zero
;
4810 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4811 ctx
->CP0_LLAddr_shift
);
4815 goto mfhc0_read_zero
;
4824 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4828 goto mfhc0_read_zero
;
4832 goto mfhc0_read_zero
;
4835 (void)rn
; /* avoid a compiler warning */
4836 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4840 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4841 tcg_gen_movi_tl(arg
, 0);
4844 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4846 const char *rn
= "invalid";
4847 uint64_t mask
= ctx
->PAMask
>> 36;
4849 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4857 tcg_gen_andi_tl(arg
, arg
, mask
);
4858 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4868 tcg_gen_andi_tl(arg
, arg
, mask
);
4869 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4879 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4880 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4881 relevant for modern MIPS cores supporting MTHC0, therefore
4882 treating MTHC0 to LLAddr as NOP. */
4895 tcg_gen_andi_tl(arg
, arg
, mask
);
4896 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4907 (void)rn
; /* avoid a compiler warning */
4909 LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4912 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
4914 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
4915 tcg_gen_movi_tl(arg
, 0);
4917 tcg_gen_movi_tl(arg
, ~0);
/* Guard for CP0 accesses: if condition @c does not hold, the register is
   not implemented in this configuration — bail out to the function-local
   cp0_unimplemented label.  do/while(0) makes the macro statement-safe. */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4928 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4930 const char *rn
= "invalid";
4933 check_insn(ctx
, ISA_MIPS32
);
4939 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4943 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4944 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4948 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4949 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4953 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4954 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4959 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
4963 goto cp0_unimplemented
;
4969 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4970 gen_helper_mfc0_random(arg
, cpu_env
);
4974 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4975 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4979 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4980 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4984 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4985 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4989 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4990 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4994 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4995 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4999 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5000 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5001 rn
= "VPEScheFBack";
5004 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5005 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5009 goto cp0_unimplemented
;
5016 TCGv_i64 tmp
= tcg_temp_new_i64();
5017 tcg_gen_ld_i64(tmp
, cpu_env
,
5018 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5019 #if defined(TARGET_MIPS64)
5021 /* Move RI/XI fields to bits 31:30 */
5022 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5023 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5026 gen_move_low32(arg
, tmp
);
5027 tcg_temp_free_i64(tmp
);
5032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5033 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5037 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5038 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5042 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5043 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5047 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5048 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5052 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5053 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5057 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5058 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5062 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5063 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5067 goto cp0_unimplemented
;
5074 TCGv_i64 tmp
= tcg_temp_new_i64();
5075 tcg_gen_ld_i64(tmp
, cpu_env
,
5076 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5077 #if defined(TARGET_MIPS64)
5079 /* Move RI/XI fields to bits 31:30 */
5080 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5081 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5084 gen_move_low32(arg
, tmp
);
5085 tcg_temp_free_i64(tmp
);
5091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5092 rn
= "GlobalNumber";
5095 goto cp0_unimplemented
;
5101 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5102 tcg_gen_ext32s_tl(arg
, arg
);
5106 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5107 rn
= "ContextConfig";
5108 goto cp0_unimplemented
;
5111 CP0_CHECK(ctx
->ulri
);
5112 tcg_gen_ld32s_tl(arg
, cpu_env
,
5113 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5117 goto cp0_unimplemented
;
5123 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5127 check_insn(ctx
, ISA_MIPS32R2
);
5128 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5132 goto cp0_unimplemented
;
5138 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5142 check_insn(ctx
, ISA_MIPS32R2
);
5143 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5147 check_insn(ctx
, ISA_MIPS32R2
);
5148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5152 check_insn(ctx
, ISA_MIPS32R2
);
5153 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5157 check_insn(ctx
, ISA_MIPS32R2
);
5158 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5162 check_insn(ctx
, ISA_MIPS32R2
);
5163 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5167 goto cp0_unimplemented
;
5173 check_insn(ctx
, ISA_MIPS32R2
);
5174 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5178 goto cp0_unimplemented
;
5184 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5185 tcg_gen_ext32s_tl(arg
, arg
);
5190 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5195 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5199 goto cp0_unimplemented
;
5205 /* Mark as an IO operation because we read the time. */
5206 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5209 gen_helper_mfc0_count(arg
, cpu_env
);
5210 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5213 /* Break the TB to be able to take timer interrupts immediately
5214 after reading count. */
5215 ctx
->bstate
= BS_STOP
;
5218 /* 6,7 are implementation dependent */
5220 goto cp0_unimplemented
;
5226 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5227 tcg_gen_ext32s_tl(arg
, arg
);
5231 goto cp0_unimplemented
;
5237 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5240 /* 6,7 are implementation dependent */
5242 goto cp0_unimplemented
;
5248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5252 check_insn(ctx
, ISA_MIPS32R2
);
5253 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5257 check_insn(ctx
, ISA_MIPS32R2
);
5258 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5262 check_insn(ctx
, ISA_MIPS32R2
);
5263 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5267 goto cp0_unimplemented
;
5273 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5277 goto cp0_unimplemented
;
5283 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5284 tcg_gen_ext32s_tl(arg
, arg
);
5288 goto cp0_unimplemented
;
5294 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5298 check_insn(ctx
, ISA_MIPS32R2
);
5299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5303 check_insn(ctx
, ISA_MIPS32R2
);
5304 CP0_CHECK(ctx
->cmgcr
);
5305 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
5306 tcg_gen_ext32s_tl(arg
, arg
);
5310 goto cp0_unimplemented
;
5316 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5320 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5324 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5328 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5332 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5336 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5339 /* 6,7 are implementation dependent */
5341 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5345 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5349 goto cp0_unimplemented
;
5355 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5359 goto cp0_unimplemented
;
5365 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5369 goto cp0_unimplemented
;
5375 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5379 goto cp0_unimplemented
;
5385 #if defined(TARGET_MIPS64)
5386 check_insn(ctx
, ISA_MIPS3
);
5387 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5388 tcg_gen_ext32s_tl(arg
, arg
);
5393 goto cp0_unimplemented
;
5397 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5398 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5401 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5405 goto cp0_unimplemented
;
5409 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5410 rn
= "'Diagnostic"; /* implementation dependent */
5415 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5419 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5420 rn
= "TraceControl";
5423 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5424 rn
= "TraceControl2";
5427 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5428 rn
= "UserTraceData";
5431 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5435 goto cp0_unimplemented
;
5442 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5443 tcg_gen_ext32s_tl(arg
, arg
);
5447 goto cp0_unimplemented
;
5453 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5454 rn
= "Performance0";
5457 // gen_helper_mfc0_performance1(arg);
5458 rn
= "Performance1";
5461 // gen_helper_mfc0_performance2(arg);
5462 rn
= "Performance2";
5465 // gen_helper_mfc0_performance3(arg);
5466 rn
= "Performance3";
5469 // gen_helper_mfc0_performance4(arg);
5470 rn
= "Performance4";
5473 // gen_helper_mfc0_performance5(arg);
5474 rn
= "Performance5";
5477 // gen_helper_mfc0_performance6(arg);
5478 rn
= "Performance6";
5481 // gen_helper_mfc0_performance7(arg);
5482 rn
= "Performance7";
5485 goto cp0_unimplemented
;
5489 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5495 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5499 goto cp0_unimplemented
;
5509 TCGv_i64 tmp
= tcg_temp_new_i64();
5510 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5511 gen_move_low32(arg
, tmp
);
5512 tcg_temp_free_i64(tmp
);
5520 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5524 goto cp0_unimplemented
;
5533 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5540 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5544 goto cp0_unimplemented
;
5550 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5551 tcg_gen_ext32s_tl(arg
, arg
);
5555 goto cp0_unimplemented
;
5562 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5566 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5567 tcg_gen_ld_tl(arg
, cpu_env
,
5568 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5569 tcg_gen_ext32s_tl(arg
, arg
);
5573 goto cp0_unimplemented
;
5577 goto cp0_unimplemented
;
5579 (void)rn
; /* avoid a compiler warning */
5580 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5584 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5585 gen_mfc0_unimplemented(ctx
, arg
);
5588 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5590 const char *rn
= "invalid";
5593 check_insn(ctx
, ISA_MIPS32
);
5595 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5603 gen_helper_mtc0_index(cpu_env
, arg
);
5607 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5608 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5612 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5617 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5627 goto cp0_unimplemented
;
5637 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5638 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5642 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5643 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5647 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5648 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5652 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5653 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5657 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5658 tcg_gen_st_tl(arg
, cpu_env
,
5659 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5663 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5664 tcg_gen_st_tl(arg
, cpu_env
,
5665 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5666 rn
= "VPEScheFBack";
5669 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5670 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5674 goto cp0_unimplemented
;
5680 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5684 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5685 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5689 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5690 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5694 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5695 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5699 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5700 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5704 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5705 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5709 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5710 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5714 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5715 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5719 goto cp0_unimplemented
;
5725 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5731 rn
= "GlobalNumber";
5734 goto cp0_unimplemented
;
5740 gen_helper_mtc0_context(cpu_env
, arg
);
5744 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5745 rn
= "ContextConfig";
5746 goto cp0_unimplemented
;
5749 CP0_CHECK(ctx
->ulri
);
5750 tcg_gen_st_tl(arg
, cpu_env
,
5751 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5755 goto cp0_unimplemented
;
5761 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5765 check_insn(ctx
, ISA_MIPS32R2
);
5766 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5768 ctx
->bstate
= BS_STOP
;
5771 goto cp0_unimplemented
;
5777 gen_helper_mtc0_wired(cpu_env
, arg
);
5781 check_insn(ctx
, ISA_MIPS32R2
);
5782 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5786 check_insn(ctx
, ISA_MIPS32R2
);
5787 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5791 check_insn(ctx
, ISA_MIPS32R2
);
5792 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5796 check_insn(ctx
, ISA_MIPS32R2
);
5797 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5801 check_insn(ctx
, ISA_MIPS32R2
);
5802 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5806 goto cp0_unimplemented
;
5812 check_insn(ctx
, ISA_MIPS32R2
);
5813 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5814 ctx
->bstate
= BS_STOP
;
5818 goto cp0_unimplemented
;
5836 goto cp0_unimplemented
;
5842 gen_helper_mtc0_count(cpu_env
, arg
);
5845 /* 6,7 are implementation dependent */
5847 goto cp0_unimplemented
;
5853 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5857 goto cp0_unimplemented
;
5863 gen_helper_mtc0_compare(cpu_env
, arg
);
5866 /* 6,7 are implementation dependent */
5868 goto cp0_unimplemented
;
5874 save_cpu_state(ctx
, 1);
5875 gen_helper_mtc0_status(cpu_env
, arg
);
5876 /* BS_STOP isn't good enough here, hflags may have changed. */
5877 gen_save_pc(ctx
->pc
+ 4);
5878 ctx
->bstate
= BS_EXCP
;
5882 check_insn(ctx
, ISA_MIPS32R2
);
5883 gen_helper_mtc0_intctl(cpu_env
, arg
);
5884 /* Stop translation as we may have switched the execution mode */
5885 ctx
->bstate
= BS_STOP
;
5889 check_insn(ctx
, ISA_MIPS32R2
);
5890 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5891 /* Stop translation as we may have switched the execution mode */
5892 ctx
->bstate
= BS_STOP
;
5896 check_insn(ctx
, ISA_MIPS32R2
);
5897 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5898 /* Stop translation as we may have switched the execution mode */
5899 ctx
->bstate
= BS_STOP
;
5903 goto cp0_unimplemented
;
5909 save_cpu_state(ctx
, 1);
5910 gen_helper_mtc0_cause(cpu_env
, arg
);
5914 goto cp0_unimplemented
;
5920 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5924 goto cp0_unimplemented
;
5934 check_insn(ctx
, ISA_MIPS32R2
);
5935 gen_helper_mtc0_ebase(cpu_env
, arg
);
5939 goto cp0_unimplemented
;
5945 gen_helper_mtc0_config0(cpu_env
, arg
);
5947 /* Stop translation as we may have switched the execution mode */
5948 ctx
->bstate
= BS_STOP
;
5951 /* ignored, read only */
5955 gen_helper_mtc0_config2(cpu_env
, arg
);
5957 /* Stop translation as we may have switched the execution mode */
5958 ctx
->bstate
= BS_STOP
;
5961 gen_helper_mtc0_config3(cpu_env
, arg
);
5963 /* Stop translation as we may have switched the execution mode */
5964 ctx
->bstate
= BS_STOP
;
5967 gen_helper_mtc0_config4(cpu_env
, arg
);
5969 ctx
->bstate
= BS_STOP
;
5972 gen_helper_mtc0_config5(cpu_env
, arg
);
5974 /* Stop translation as we may have switched the execution mode */
5975 ctx
->bstate
= BS_STOP
;
5977 /* 6,7 are implementation dependent */
5987 rn
= "Invalid config selector";
5988 goto cp0_unimplemented
;
5994 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5998 goto cp0_unimplemented
;
6004 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6008 goto cp0_unimplemented
;
6014 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6018 goto cp0_unimplemented
;
6024 #if defined(TARGET_MIPS64)
6025 check_insn(ctx
, ISA_MIPS3
);
6026 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6031 goto cp0_unimplemented
;
6035 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6036 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6039 gen_helper_mtc0_framemask(cpu_env
, arg
);
6043 goto cp0_unimplemented
;
6048 rn
= "Diagnostic"; /* implementation dependent */
6053 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6054 /* BS_STOP isn't good enough here, hflags may have changed. */
6055 gen_save_pc(ctx
->pc
+ 4);
6056 ctx
->bstate
= BS_EXCP
;
6060 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6061 rn
= "TraceControl";
6062 /* Stop translation as we may have switched the execution mode */
6063 ctx
->bstate
= BS_STOP
;
6066 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6067 rn
= "TraceControl2";
6068 /* Stop translation as we may have switched the execution mode */
6069 ctx
->bstate
= BS_STOP
;
6072 /* Stop translation as we may have switched the execution mode */
6073 ctx
->bstate
= BS_STOP
;
6074 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6075 rn
= "UserTraceData";
6076 /* Stop translation as we may have switched the execution mode */
6077 ctx
->bstate
= BS_STOP
;
6080 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6081 /* Stop translation as we may have switched the execution mode */
6082 ctx
->bstate
= BS_STOP
;
6086 goto cp0_unimplemented
;
6093 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6097 goto cp0_unimplemented
;
6103 gen_helper_mtc0_performance0(cpu_env
, arg
);
6104 rn
= "Performance0";
6107 // gen_helper_mtc0_performance1(arg);
6108 rn
= "Performance1";
6111 // gen_helper_mtc0_performance2(arg);
6112 rn
= "Performance2";
6115 // gen_helper_mtc0_performance3(arg);
6116 rn
= "Performance3";
6119 // gen_helper_mtc0_performance4(arg);
6120 rn
= "Performance4";
6123 // gen_helper_mtc0_performance5(arg);
6124 rn
= "Performance5";
6127 // gen_helper_mtc0_performance6(arg);
6128 rn
= "Performance6";
6131 // gen_helper_mtc0_performance7(arg);
6132 rn
= "Performance7";
6135 goto cp0_unimplemented
;
6149 goto cp0_unimplemented
;
6158 gen_helper_mtc0_taglo(cpu_env
, arg
);
6165 gen_helper_mtc0_datalo(cpu_env
, arg
);
6169 goto cp0_unimplemented
;
6178 gen_helper_mtc0_taghi(cpu_env
, arg
);
6185 gen_helper_mtc0_datahi(cpu_env
, arg
);
6190 goto cp0_unimplemented
;
6196 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6200 goto cp0_unimplemented
;
6207 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6211 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6212 tcg_gen_st_tl(arg
, cpu_env
,
6213 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6217 goto cp0_unimplemented
;
6219 /* Stop translation as we may have switched the execution mode */
6220 ctx
->bstate
= BS_STOP
;
6223 goto cp0_unimplemented
;
6225 (void)rn
; /* avoid a compiler warning */
6226 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6227 /* For simplicity assume that all writes can cause interrupts. */
6228 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6230 ctx
->bstate
= BS_STOP
;
6235 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6238 #if defined(TARGET_MIPS64)
6239 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6241 const char *rn
= "invalid";
6244 check_insn(ctx
, ISA_MIPS64
);
6250 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6254 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6255 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6259 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6260 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6264 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6265 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6270 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6274 goto cp0_unimplemented
;
6280 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6281 gen_helper_mfc0_random(arg
, cpu_env
);
6285 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6286 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6290 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6291 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6295 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6296 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6300 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6301 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6305 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6306 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6310 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6311 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6312 rn
= "VPEScheFBack";
6315 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6316 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6320 goto cp0_unimplemented
;
6326 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6330 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6331 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6335 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6336 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6340 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6341 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6345 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6346 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6350 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6351 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6355 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6356 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6360 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6361 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6365 goto cp0_unimplemented
;
6371 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6376 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6377 rn
= "GlobalNumber";
6380 goto cp0_unimplemented
;
6386 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6390 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6391 rn
= "ContextConfig";
6392 goto cp0_unimplemented
;
6395 CP0_CHECK(ctx
->ulri
);
6396 tcg_gen_ld_tl(arg
, cpu_env
,
6397 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6401 goto cp0_unimplemented
;
6407 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6411 check_insn(ctx
, ISA_MIPS32R2
);
6412 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6416 goto cp0_unimplemented
;
6422 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6426 check_insn(ctx
, ISA_MIPS32R2
);
6427 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6431 check_insn(ctx
, ISA_MIPS32R2
);
6432 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6436 check_insn(ctx
, ISA_MIPS32R2
);
6437 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6441 check_insn(ctx
, ISA_MIPS32R2
);
6442 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6446 check_insn(ctx
, ISA_MIPS32R2
);
6447 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6451 goto cp0_unimplemented
;
6457 check_insn(ctx
, ISA_MIPS32R2
);
6458 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6462 goto cp0_unimplemented
;
6468 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6473 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6478 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6482 goto cp0_unimplemented
;
6488 /* Mark as an IO operation because we read the time. */
6489 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6492 gen_helper_mfc0_count(arg
, cpu_env
);
6493 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6496 /* Break the TB to be able to take timer interrupts immediately
6497 after reading count. */
6498 ctx
->bstate
= BS_STOP
;
6501 /* 6,7 are implementation dependent */
6503 goto cp0_unimplemented
;
6509 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6513 goto cp0_unimplemented
;
6519 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6522 /* 6,7 are implementation dependent */
6524 goto cp0_unimplemented
;
6530 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6534 check_insn(ctx
, ISA_MIPS32R2
);
6535 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6539 check_insn(ctx
, ISA_MIPS32R2
);
6540 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6544 check_insn(ctx
, ISA_MIPS32R2
);
6545 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6549 goto cp0_unimplemented
;
6555 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6559 goto cp0_unimplemented
;
6565 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6569 goto cp0_unimplemented
;
6575 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6579 check_insn(ctx
, ISA_MIPS32R2
);
6580 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6584 check_insn(ctx
, ISA_MIPS32R2
);
6585 CP0_CHECK(ctx
->cmgcr
);
6586 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6590 goto cp0_unimplemented
;
6596 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6600 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6608 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6612 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6616 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6619 /* 6,7 are implementation dependent */
6621 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6625 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6629 goto cp0_unimplemented
;
6635 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6639 goto cp0_unimplemented
;
6645 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6649 goto cp0_unimplemented
;
6655 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6659 goto cp0_unimplemented
;
6665 check_insn(ctx
, ISA_MIPS3
);
6666 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6670 goto cp0_unimplemented
;
6674 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6675 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6678 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6682 goto cp0_unimplemented
;
6686 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6687 rn
= "'Diagnostic"; /* implementation dependent */
6692 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6696 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6697 rn
= "TraceControl";
6700 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6701 rn
= "TraceControl2";
6704 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6705 rn
= "UserTraceData";
6708 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6712 goto cp0_unimplemented
;
6719 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6723 goto cp0_unimplemented
;
6729 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6730 rn
= "Performance0";
6733 // gen_helper_dmfc0_performance1(arg);
6734 rn
= "Performance1";
6737 // gen_helper_dmfc0_performance2(arg);
6738 rn
= "Performance2";
6741 // gen_helper_dmfc0_performance3(arg);
6742 rn
= "Performance3";
6745 // gen_helper_dmfc0_performance4(arg);
6746 rn
= "Performance4";
6749 // gen_helper_dmfc0_performance5(arg);
6750 rn
= "Performance5";
6753 // gen_helper_dmfc0_performance6(arg);
6754 rn
= "Performance6";
6757 // gen_helper_dmfc0_performance7(arg);
6758 rn
= "Performance7";
6761 goto cp0_unimplemented
;
6765 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6772 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6776 goto cp0_unimplemented
;
6785 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6792 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6796 goto cp0_unimplemented
;
6805 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6812 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6816 goto cp0_unimplemented
;
6822 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6826 goto cp0_unimplemented
;
6833 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6837 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6838 tcg_gen_ld_tl(arg
, cpu_env
,
6839 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6843 goto cp0_unimplemented
;
6847 goto cp0_unimplemented
;
6849 (void)rn
; /* avoid a compiler warning */
6850 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6854 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6855 gen_mfc0_unimplemented(ctx
, arg
);
6858 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6860 const char *rn
= "invalid";
6863 check_insn(ctx
, ISA_MIPS64
);
6865 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6873 gen_helper_mtc0_index(cpu_env
, arg
);
6877 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6878 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6882 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6887 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6897 goto cp0_unimplemented
;
6907 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6908 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6912 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6913 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6917 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6918 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6922 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6923 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6927 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6928 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6932 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6933 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6934 rn
= "VPEScheFBack";
6937 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6938 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6942 goto cp0_unimplemented
;
6948 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6952 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6953 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6957 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6958 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6962 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6963 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6967 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6968 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6972 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6973 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6977 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6978 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6982 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6983 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6987 goto cp0_unimplemented
;
6993 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
6999 rn
= "GlobalNumber";
7002 goto cp0_unimplemented
;
7008 gen_helper_mtc0_context(cpu_env
, arg
);
7012 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7013 rn
= "ContextConfig";
7014 goto cp0_unimplemented
;
7017 CP0_CHECK(ctx
->ulri
);
7018 tcg_gen_st_tl(arg
, cpu_env
,
7019 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7023 goto cp0_unimplemented
;
7029 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7033 check_insn(ctx
, ISA_MIPS32R2
);
7034 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7038 goto cp0_unimplemented
;
7044 gen_helper_mtc0_wired(cpu_env
, arg
);
7048 check_insn(ctx
, ISA_MIPS32R2
);
7049 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7053 check_insn(ctx
, ISA_MIPS32R2
);
7054 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7058 check_insn(ctx
, ISA_MIPS32R2
);
7059 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7063 check_insn(ctx
, ISA_MIPS32R2
);
7064 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7068 check_insn(ctx
, ISA_MIPS32R2
);
7069 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7073 goto cp0_unimplemented
;
7079 check_insn(ctx
, ISA_MIPS32R2
);
7080 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7081 ctx
->bstate
= BS_STOP
;
7085 goto cp0_unimplemented
;
7103 goto cp0_unimplemented
;
7109 gen_helper_mtc0_count(cpu_env
, arg
);
7112 /* 6,7 are implementation dependent */
7114 goto cp0_unimplemented
;
7116 /* Stop translation as we may have switched the execution mode */
7117 ctx
->bstate
= BS_STOP
;
7122 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7126 goto cp0_unimplemented
;
7132 gen_helper_mtc0_compare(cpu_env
, arg
);
7135 /* 6,7 are implementation dependent */
7137 goto cp0_unimplemented
;
7139 /* Stop translation as we may have switched the execution mode */
7140 ctx
->bstate
= BS_STOP
;
7145 save_cpu_state(ctx
, 1);
7146 gen_helper_mtc0_status(cpu_env
, arg
);
7147 /* BS_STOP isn't good enough here, hflags may have changed. */
7148 gen_save_pc(ctx
->pc
+ 4);
7149 ctx
->bstate
= BS_EXCP
;
7153 check_insn(ctx
, ISA_MIPS32R2
);
7154 gen_helper_mtc0_intctl(cpu_env
, arg
);
7155 /* Stop translation as we may have switched the execution mode */
7156 ctx
->bstate
= BS_STOP
;
7160 check_insn(ctx
, ISA_MIPS32R2
);
7161 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7162 /* Stop translation as we may have switched the execution mode */
7163 ctx
->bstate
= BS_STOP
;
7167 check_insn(ctx
, ISA_MIPS32R2
);
7168 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7169 /* Stop translation as we may have switched the execution mode */
7170 ctx
->bstate
= BS_STOP
;
7174 goto cp0_unimplemented
;
7180 save_cpu_state(ctx
, 1);
7181 /* Mark as an IO operation because we may trigger a software
7183 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7186 gen_helper_mtc0_cause(cpu_env
, arg
);
7187 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7190 /* Stop translation as we may have triggered an intetrupt */
7191 ctx
->bstate
= BS_STOP
;
7195 goto cp0_unimplemented
;
7201 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7205 goto cp0_unimplemented
;
7215 check_insn(ctx
, ISA_MIPS32R2
);
7216 gen_helper_mtc0_ebase(cpu_env
, arg
);
7220 goto cp0_unimplemented
;
7226 gen_helper_mtc0_config0(cpu_env
, arg
);
7228 /* Stop translation as we may have switched the execution mode */
7229 ctx
->bstate
= BS_STOP
;
7232 /* ignored, read only */
7236 gen_helper_mtc0_config2(cpu_env
, arg
);
7238 /* Stop translation as we may have switched the execution mode */
7239 ctx
->bstate
= BS_STOP
;
7242 gen_helper_mtc0_config3(cpu_env
, arg
);
7244 /* Stop translation as we may have switched the execution mode */
7245 ctx
->bstate
= BS_STOP
;
7248 /* currently ignored */
7252 gen_helper_mtc0_config5(cpu_env
, arg
);
7254 /* Stop translation as we may have switched the execution mode */
7255 ctx
->bstate
= BS_STOP
;
7257 /* 6,7 are implementation dependent */
7259 rn
= "Invalid config selector";
7260 goto cp0_unimplemented
;
7266 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7270 goto cp0_unimplemented
;
7276 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7280 goto cp0_unimplemented
;
7286 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7290 goto cp0_unimplemented
;
7296 check_insn(ctx
, ISA_MIPS3
);
7297 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7301 goto cp0_unimplemented
;
7305 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7306 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7309 gen_helper_mtc0_framemask(cpu_env
, arg
);
7313 goto cp0_unimplemented
;
7318 rn
= "Diagnostic"; /* implementation dependent */
7323 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7324 /* BS_STOP isn't good enough here, hflags may have changed. */
7325 gen_save_pc(ctx
->pc
+ 4);
7326 ctx
->bstate
= BS_EXCP
;
7330 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7331 /* Stop translation as we may have switched the execution mode */
7332 ctx
->bstate
= BS_STOP
;
7333 rn
= "TraceControl";
7336 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7337 /* Stop translation as we may have switched the execution mode */
7338 ctx
->bstate
= BS_STOP
;
7339 rn
= "TraceControl2";
7342 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7343 /* Stop translation as we may have switched the execution mode */
7344 ctx
->bstate
= BS_STOP
;
7345 rn
= "UserTraceData";
7348 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7349 /* Stop translation as we may have switched the execution mode */
7350 ctx
->bstate
= BS_STOP
;
7354 goto cp0_unimplemented
;
7361 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7365 goto cp0_unimplemented
;
7371 gen_helper_mtc0_performance0(cpu_env
, arg
);
7372 rn
= "Performance0";
7375 // gen_helper_mtc0_performance1(cpu_env, arg);
7376 rn
= "Performance1";
7379 // gen_helper_mtc0_performance2(cpu_env, arg);
7380 rn
= "Performance2";
7383 // gen_helper_mtc0_performance3(cpu_env, arg);
7384 rn
= "Performance3";
7387 // gen_helper_mtc0_performance4(cpu_env, arg);
7388 rn
= "Performance4";
7391 // gen_helper_mtc0_performance5(cpu_env, arg);
7392 rn
= "Performance5";
7395 // gen_helper_mtc0_performance6(cpu_env, arg);
7396 rn
= "Performance6";
7399 // gen_helper_mtc0_performance7(cpu_env, arg);
7400 rn
= "Performance7";
7403 goto cp0_unimplemented
;
7417 goto cp0_unimplemented
;
7426 gen_helper_mtc0_taglo(cpu_env
, arg
);
7433 gen_helper_mtc0_datalo(cpu_env
, arg
);
7437 goto cp0_unimplemented
;
7446 gen_helper_mtc0_taghi(cpu_env
, arg
);
7453 gen_helper_mtc0_datahi(cpu_env
, arg
);
7458 goto cp0_unimplemented
;
7464 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7468 goto cp0_unimplemented
;
7475 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7479 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7480 tcg_gen_st_tl(arg
, cpu_env
,
7481 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7485 goto cp0_unimplemented
;
7487 /* Stop translation as we may have switched the execution mode */
7488 ctx
->bstate
= BS_STOP
;
7491 goto cp0_unimplemented
;
7493 (void)rn
; /* avoid a compiler warning */
7494 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7495 /* For simplicity assume that all writes can cause interrupts. */
7496 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7498 ctx
->bstate
= BS_STOP
;
7503 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7505 #endif /* TARGET_MIPS64 */
7507 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7508 int u
, int sel
, int h
)
7510 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7511 TCGv t0
= tcg_temp_local_new();
7513 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7514 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7515 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7516 tcg_gen_movi_tl(t0
, -1);
7517 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7518 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7519 tcg_gen_movi_tl(t0
, -1);
7525 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7528 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7538 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7541 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7544 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7547 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7550 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7553 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7556 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7559 gen_mfc0(ctx
, t0
, rt
, sel
);
7566 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7569 gen_mfc0(ctx
, t0
, rt
, sel
);
7575 gen_helper_mftc0_status(t0
, cpu_env
);
7578 gen_mfc0(ctx
, t0
, rt
, sel
);
7584 gen_helper_mftc0_cause(t0
, cpu_env
);
7594 gen_helper_mftc0_epc(t0
, cpu_env
);
7604 gen_helper_mftc0_ebase(t0
, cpu_env
);
7614 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7624 gen_helper_mftc0_debug(t0
, cpu_env
);
7627 gen_mfc0(ctx
, t0
, rt
, sel
);
7632 gen_mfc0(ctx
, t0
, rt
, sel
);
7634 } else switch (sel
) {
7635 /* GPR registers. */
7637 gen_helper_1e0i(mftgpr
, t0
, rt
);
7639 /* Auxiliary CPU registers */
7643 gen_helper_1e0i(mftlo
, t0
, 0);
7646 gen_helper_1e0i(mfthi
, t0
, 0);
7649 gen_helper_1e0i(mftacx
, t0
, 0);
7652 gen_helper_1e0i(mftlo
, t0
, 1);
7655 gen_helper_1e0i(mfthi
, t0
, 1);
7658 gen_helper_1e0i(mftacx
, t0
, 1);
7661 gen_helper_1e0i(mftlo
, t0
, 2);
7664 gen_helper_1e0i(mfthi
, t0
, 2);
7667 gen_helper_1e0i(mftacx
, t0
, 2);
7670 gen_helper_1e0i(mftlo
, t0
, 3);
7673 gen_helper_1e0i(mfthi
, t0
, 3);
7676 gen_helper_1e0i(mftacx
, t0
, 3);
7679 gen_helper_mftdsp(t0
, cpu_env
);
7685 /* Floating point (COP1). */
7687 /* XXX: For now we support only a single FPU context. */
7689 TCGv_i32 fp0
= tcg_temp_new_i32();
7691 gen_load_fpr32(ctx
, fp0
, rt
);
7692 tcg_gen_ext_i32_tl(t0
, fp0
);
7693 tcg_temp_free_i32(fp0
);
7695 TCGv_i32 fp0
= tcg_temp_new_i32();
7697 gen_load_fpr32h(ctx
, fp0
, rt
);
7698 tcg_gen_ext_i32_tl(t0
, fp0
);
7699 tcg_temp_free_i32(fp0
);
7703 /* XXX: For now we support only a single FPU context. */
7704 gen_helper_1e0i(cfc1
, t0
, rt
);
7706 /* COP2: Not implemented. */
7713 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7714 gen_store_gpr(t0
, rd
);
7720 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7721 generate_exception_end(ctx
, EXCP_RI
);
7724 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7725 int u
, int sel
, int h
)
7727 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7728 TCGv t0
= tcg_temp_local_new();
7730 gen_load_gpr(t0
, rt
);
7731 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7732 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7733 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7735 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7736 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7743 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7746 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7756 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7759 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7762 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7765 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7768 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7771 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7774 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7777 gen_mtc0(ctx
, t0
, rd
, sel
);
7784 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7787 gen_mtc0(ctx
, t0
, rd
, sel
);
7793 gen_helper_mttc0_status(cpu_env
, t0
);
7796 gen_mtc0(ctx
, t0
, rd
, sel
);
7802 gen_helper_mttc0_cause(cpu_env
, t0
);
7812 gen_helper_mttc0_ebase(cpu_env
, t0
);
7822 gen_helper_mttc0_debug(cpu_env
, t0
);
7825 gen_mtc0(ctx
, t0
, rd
, sel
);
7830 gen_mtc0(ctx
, t0
, rd
, sel
);
7832 } else switch (sel
) {
7833 /* GPR registers. */
7835 gen_helper_0e1i(mttgpr
, t0
, rd
);
7837 /* Auxiliary CPU registers */
7841 gen_helper_0e1i(mttlo
, t0
, 0);
7844 gen_helper_0e1i(mtthi
, t0
, 0);
7847 gen_helper_0e1i(mttacx
, t0
, 0);
7850 gen_helper_0e1i(mttlo
, t0
, 1);
7853 gen_helper_0e1i(mtthi
, t0
, 1);
7856 gen_helper_0e1i(mttacx
, t0
, 1);
7859 gen_helper_0e1i(mttlo
, t0
, 2);
7862 gen_helper_0e1i(mtthi
, t0
, 2);
7865 gen_helper_0e1i(mttacx
, t0
, 2);
7868 gen_helper_0e1i(mttlo
, t0
, 3);
7871 gen_helper_0e1i(mtthi
, t0
, 3);
7874 gen_helper_0e1i(mttacx
, t0
, 3);
7877 gen_helper_mttdsp(cpu_env
, t0
);
7883 /* Floating point (COP1). */
7885 /* XXX: For now we support only a single FPU context. */
7887 TCGv_i32 fp0
= tcg_temp_new_i32();
7889 tcg_gen_trunc_tl_i32(fp0
, t0
);
7890 gen_store_fpr32(ctx
, fp0
, rd
);
7891 tcg_temp_free_i32(fp0
);
7893 TCGv_i32 fp0
= tcg_temp_new_i32();
7895 tcg_gen_trunc_tl_i32(fp0
, t0
);
7896 gen_store_fpr32h(ctx
, fp0
, rd
);
7897 tcg_temp_free_i32(fp0
);
7901 /* XXX: For now we support only a single FPU context. */
7903 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
7905 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
7906 tcg_temp_free_i32(fs_tmp
);
7908 /* Stop translation as we may have changed hflags */
7909 ctx
->bstate
= BS_STOP
;
7911 /* COP2: Not implemented. */
7918 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7924 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7925 generate_exception_end(ctx
, EXCP_RI
);
7928 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
7930 const char *opn
= "ldst";
7932 check_cp0_enabled(ctx
);
7939 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7944 TCGv t0
= tcg_temp_new();
7946 gen_load_gpr(t0
, rt
);
7947 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7952 #if defined(TARGET_MIPS64)
7954 check_insn(ctx
, ISA_MIPS3
);
7959 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7963 check_insn(ctx
, ISA_MIPS3
);
7965 TCGv t0
= tcg_temp_new();
7967 gen_load_gpr(t0
, rt
);
7968 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7980 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7986 TCGv t0
= tcg_temp_new();
7987 gen_load_gpr(t0
, rt
);
7988 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7994 check_insn(ctx
, ASE_MT
);
7999 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8000 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8004 check_insn(ctx
, ASE_MT
);
8005 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8006 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8011 if (!env
->tlb
->helper_tlbwi
)
8013 gen_helper_tlbwi(cpu_env
);
8018 if (!env
->tlb
->helper_tlbinv
) {
8021 gen_helper_tlbinv(cpu_env
);
8022 } /* treat as nop if TLBINV not supported */
8027 if (!env
->tlb
->helper_tlbinvf
) {
8030 gen_helper_tlbinvf(cpu_env
);
8031 } /* treat as nop if TLBINV not supported */
8035 if (!env
->tlb
->helper_tlbwr
)
8037 gen_helper_tlbwr(cpu_env
);
8041 if (!env
->tlb
->helper_tlbp
)
8043 gen_helper_tlbp(cpu_env
);
8047 if (!env
->tlb
->helper_tlbr
)
8049 gen_helper_tlbr(cpu_env
);
8051 case OPC_ERET
: /* OPC_ERETNC */
8052 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8053 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8056 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8057 if (ctx
->opcode
& (1 << bit_shift
)) {
8060 check_insn(ctx
, ISA_MIPS32R5
);
8061 gen_helper_eretnc(cpu_env
);
8065 check_insn(ctx
, ISA_MIPS2
);
8066 gen_helper_eret(cpu_env
);
8068 ctx
->bstate
= BS_EXCP
;
8073 check_insn(ctx
, ISA_MIPS32
);
8074 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8075 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8078 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8080 generate_exception_end(ctx
, EXCP_RI
);
8082 gen_helper_deret(cpu_env
);
8083 ctx
->bstate
= BS_EXCP
;
8088 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8089 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8090 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8093 /* If we get an exception, we want to restart at next instruction */
8095 save_cpu_state(ctx
, 1);
8097 gen_helper_wait(cpu_env
);
8098 ctx
->bstate
= BS_EXCP
;
8103 generate_exception_end(ctx
, EXCP_RI
);
8106 (void)opn
; /* avoid a compiler warning */
8108 #endif /* !CONFIG_USER_ONLY */
8110 /* CP1 Branches (before delay slot) */
8111 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8112 int32_t cc
, int32_t offset
)
8114 target_ulong btarget
;
8115 TCGv_i32 t0
= tcg_temp_new_i32();
8117 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8118 generate_exception_end(ctx
, EXCP_RI
);
8123 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8125 btarget
= ctx
->pc
+ 4 + offset
;
8129 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8130 tcg_gen_not_i32(t0
, t0
);
8131 tcg_gen_andi_i32(t0
, t0
, 1);
8132 tcg_gen_extu_i32_tl(bcond
, t0
);
8135 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8136 tcg_gen_not_i32(t0
, t0
);
8137 tcg_gen_andi_i32(t0
, t0
, 1);
8138 tcg_gen_extu_i32_tl(bcond
, t0
);
8141 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8142 tcg_gen_andi_i32(t0
, t0
, 1);
8143 tcg_gen_extu_i32_tl(bcond
, t0
);
8146 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8147 tcg_gen_andi_i32(t0
, t0
, 1);
8148 tcg_gen_extu_i32_tl(bcond
, t0
);
8150 ctx
->hflags
|= MIPS_HFLAG_BL
;
8154 TCGv_i32 t1
= tcg_temp_new_i32();
8155 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8156 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8157 tcg_gen_nand_i32(t0
, t0
, t1
);
8158 tcg_temp_free_i32(t1
);
8159 tcg_gen_andi_i32(t0
, t0
, 1);
8160 tcg_gen_extu_i32_tl(bcond
, t0
);
8165 TCGv_i32 t1
= tcg_temp_new_i32();
8166 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8167 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8168 tcg_gen_or_i32(t0
, t0
, t1
);
8169 tcg_temp_free_i32(t1
);
8170 tcg_gen_andi_i32(t0
, t0
, 1);
8171 tcg_gen_extu_i32_tl(bcond
, t0
);
8176 TCGv_i32 t1
= tcg_temp_new_i32();
8177 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8178 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8179 tcg_gen_and_i32(t0
, t0
, t1
);
8180 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8181 tcg_gen_and_i32(t0
, t0
, t1
);
8182 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8183 tcg_gen_nand_i32(t0
, t0
, t1
);
8184 tcg_temp_free_i32(t1
);
8185 tcg_gen_andi_i32(t0
, t0
, 1);
8186 tcg_gen_extu_i32_tl(bcond
, t0
);
8191 TCGv_i32 t1
= tcg_temp_new_i32();
8192 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8193 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8194 tcg_gen_or_i32(t0
, t0
, t1
);
8195 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8196 tcg_gen_or_i32(t0
, t0
, t1
);
8197 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8198 tcg_gen_or_i32(t0
, t0
, t1
);
8199 tcg_temp_free_i32(t1
);
8200 tcg_gen_andi_i32(t0
, t0
, 1);
8201 tcg_gen_extu_i32_tl(bcond
, t0
);
8204 ctx
->hflags
|= MIPS_HFLAG_BC
;
8207 MIPS_INVAL("cp1 cond branch");
8208 generate_exception_end(ctx
, EXCP_RI
);
8211 ctx
->btarget
= btarget
;
8212 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8214 tcg_temp_free_i32(t0
);
8217 /* R6 CP1 Branches */
8218 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8219 int32_t ft
, int32_t offset
,
8222 target_ulong btarget
;
8223 TCGv_i64 t0
= tcg_temp_new_i64();
8225 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8226 #ifdef MIPS_DEBUG_DISAS
8227 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8230 generate_exception_end(ctx
, EXCP_RI
);
8234 gen_load_fpr64(ctx
, t0
, ft
);
8235 tcg_gen_andi_i64(t0
, t0
, 1);
8237 btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
8241 tcg_gen_xori_i64(t0
, t0
, 1);
8242 ctx
->hflags
|= MIPS_HFLAG_BC
;
8245 /* t0 already set */
8246 ctx
->hflags
|= MIPS_HFLAG_BC
;
8249 MIPS_INVAL("cp1 cond branch");
8250 generate_exception_end(ctx
, EXCP_RI
);
8254 tcg_gen_trunc_i64_tl(bcond
, t0
);
8256 ctx
->btarget
= btarget
;
8258 switch (delayslot_size
) {
8260 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8263 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8268 tcg_temp_free_i64(t0
);
8271 /* Coprocessor 1 (FPU) */
8273 #define FOP(func, fmt) (((fmt) << 21) | (func))
8276 OPC_ADD_S
= FOP(0, FMT_S
),
8277 OPC_SUB_S
= FOP(1, FMT_S
),
8278 OPC_MUL_S
= FOP(2, FMT_S
),
8279 OPC_DIV_S
= FOP(3, FMT_S
),
8280 OPC_SQRT_S
= FOP(4, FMT_S
),
8281 OPC_ABS_S
= FOP(5, FMT_S
),
8282 OPC_MOV_S
= FOP(6, FMT_S
),
8283 OPC_NEG_S
= FOP(7, FMT_S
),
8284 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8285 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8286 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8287 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8288 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8289 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8290 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8291 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8292 OPC_SEL_S
= FOP(16, FMT_S
),
8293 OPC_MOVCF_S
= FOP(17, FMT_S
),
8294 OPC_MOVZ_S
= FOP(18, FMT_S
),
8295 OPC_MOVN_S
= FOP(19, FMT_S
),
8296 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8297 OPC_RECIP_S
= FOP(21, FMT_S
),
8298 OPC_RSQRT_S
= FOP(22, FMT_S
),
8299 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8300 OPC_MADDF_S
= FOP(24, FMT_S
),
8301 OPC_MSUBF_S
= FOP(25, FMT_S
),
8302 OPC_RINT_S
= FOP(26, FMT_S
),
8303 OPC_CLASS_S
= FOP(27, FMT_S
),
8304 OPC_MIN_S
= FOP(28, FMT_S
),
8305 OPC_RECIP2_S
= FOP(28, FMT_S
),
8306 OPC_MINA_S
= FOP(29, FMT_S
),
8307 OPC_RECIP1_S
= FOP(29, FMT_S
),
8308 OPC_MAX_S
= FOP(30, FMT_S
),
8309 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8310 OPC_MAXA_S
= FOP(31, FMT_S
),
8311 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8312 OPC_CVT_D_S
= FOP(33, FMT_S
),
8313 OPC_CVT_W_S
= FOP(36, FMT_S
),
8314 OPC_CVT_L_S
= FOP(37, FMT_S
),
8315 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8316 OPC_CMP_F_S
= FOP (48, FMT_S
),
8317 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8318 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8319 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8320 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8321 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8322 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8323 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8324 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8325 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8326 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8327 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8328 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8329 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8330 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8331 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8333 OPC_ADD_D
= FOP(0, FMT_D
),
8334 OPC_SUB_D
= FOP(1, FMT_D
),
8335 OPC_MUL_D
= FOP(2, FMT_D
),
8336 OPC_DIV_D
= FOP(3, FMT_D
),
8337 OPC_SQRT_D
= FOP(4, FMT_D
),
8338 OPC_ABS_D
= FOP(5, FMT_D
),
8339 OPC_MOV_D
= FOP(6, FMT_D
),
8340 OPC_NEG_D
= FOP(7, FMT_D
),
8341 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8342 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8343 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8344 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8345 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8346 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8347 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8348 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8349 OPC_SEL_D
= FOP(16, FMT_D
),
8350 OPC_MOVCF_D
= FOP(17, FMT_D
),
8351 OPC_MOVZ_D
= FOP(18, FMT_D
),
8352 OPC_MOVN_D
= FOP(19, FMT_D
),
8353 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8354 OPC_RECIP_D
= FOP(21, FMT_D
),
8355 OPC_RSQRT_D
= FOP(22, FMT_D
),
8356 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8357 OPC_MADDF_D
= FOP(24, FMT_D
),
8358 OPC_MSUBF_D
= FOP(25, FMT_D
),
8359 OPC_RINT_D
= FOP(26, FMT_D
),
8360 OPC_CLASS_D
= FOP(27, FMT_D
),
8361 OPC_MIN_D
= FOP(28, FMT_D
),
8362 OPC_RECIP2_D
= FOP(28, FMT_D
),
8363 OPC_MINA_D
= FOP(29, FMT_D
),
8364 OPC_RECIP1_D
= FOP(29, FMT_D
),
8365 OPC_MAX_D
= FOP(30, FMT_D
),
8366 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8367 OPC_MAXA_D
= FOP(31, FMT_D
),
8368 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8369 OPC_CVT_S_D
= FOP(32, FMT_D
),
8370 OPC_CVT_W_D
= FOP(36, FMT_D
),
8371 OPC_CVT_L_D
= FOP(37, FMT_D
),
8372 OPC_CMP_F_D
= FOP (48, FMT_D
),
8373 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8374 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8375 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8376 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8377 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8378 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8379 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8380 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8381 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8382 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8383 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8384 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8385 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8386 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8387 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8389 OPC_CVT_S_W
= FOP(32, FMT_W
),
8390 OPC_CVT_D_W
= FOP(33, FMT_W
),
8391 OPC_CVT_S_L
= FOP(32, FMT_L
),
8392 OPC_CVT_D_L
= FOP(33, FMT_L
),
8393 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8395 OPC_ADD_PS
= FOP(0, FMT_PS
),
8396 OPC_SUB_PS
= FOP(1, FMT_PS
),
8397 OPC_MUL_PS
= FOP(2, FMT_PS
),
8398 OPC_DIV_PS
= FOP(3, FMT_PS
),
8399 OPC_ABS_PS
= FOP(5, FMT_PS
),
8400 OPC_MOV_PS
= FOP(6, FMT_PS
),
8401 OPC_NEG_PS
= FOP(7, FMT_PS
),
8402 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8403 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8404 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8405 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8406 OPC_MULR_PS
= FOP(26, FMT_PS
),
8407 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8408 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8409 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8410 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8412 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8413 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8414 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8415 OPC_PLL_PS
= FOP(44, FMT_PS
),
8416 OPC_PLU_PS
= FOP(45, FMT_PS
),
8417 OPC_PUL_PS
= FOP(46, FMT_PS
),
8418 OPC_PUU_PS
= FOP(47, FMT_PS
),
8419 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8420 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8421 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8422 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8423 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8424 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8425 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8426 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8427 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8428 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8429 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8430 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8431 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8432 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8433 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8434 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8438 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8439 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8440 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8441 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8442 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8443 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8444 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8445 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8446 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8447 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8448 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8449 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8450 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8451 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8452 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8453 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8454 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8455 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8456 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8457 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8458 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8459 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8461 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8462 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8463 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8464 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8465 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8466 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8467 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8468 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8469 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8470 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8471 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8472 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8473 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8474 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8475 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8476 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8477 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8478 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8479 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8480 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8481 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8482 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8484 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8486 TCGv t0
= tcg_temp_new();
8491 TCGv_i32 fp0
= tcg_temp_new_i32();
8493 gen_load_fpr32(ctx
, fp0
, fs
);
8494 tcg_gen_ext_i32_tl(t0
, fp0
);
8495 tcg_temp_free_i32(fp0
);
8497 gen_store_gpr(t0
, rt
);
8500 gen_load_gpr(t0
, rt
);
8502 TCGv_i32 fp0
= tcg_temp_new_i32();
8504 tcg_gen_trunc_tl_i32(fp0
, t0
);
8505 gen_store_fpr32(ctx
, fp0
, fs
);
8506 tcg_temp_free_i32(fp0
);
8510 gen_helper_1e0i(cfc1
, t0
, fs
);
8511 gen_store_gpr(t0
, rt
);
8514 gen_load_gpr(t0
, rt
);
8515 save_cpu_state(ctx
, 0);
8517 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8519 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8520 tcg_temp_free_i32(fs_tmp
);
8522 /* Stop translation as we may have changed hflags */
8523 ctx
->bstate
= BS_STOP
;
8525 #if defined(TARGET_MIPS64)
8527 gen_load_fpr64(ctx
, t0
, fs
);
8528 gen_store_gpr(t0
, rt
);
8531 gen_load_gpr(t0
, rt
);
8532 gen_store_fpr64(ctx
, t0
, fs
);
8537 TCGv_i32 fp0
= tcg_temp_new_i32();
8539 gen_load_fpr32h(ctx
, fp0
, fs
);
8540 tcg_gen_ext_i32_tl(t0
, fp0
);
8541 tcg_temp_free_i32(fp0
);
8543 gen_store_gpr(t0
, rt
);
8546 gen_load_gpr(t0
, rt
);
8548 TCGv_i32 fp0
= tcg_temp_new_i32();
8550 tcg_gen_trunc_tl_i32(fp0
, t0
);
8551 gen_store_fpr32h(ctx
, fp0
, fs
);
8552 tcg_temp_free_i32(fp0
);
8556 MIPS_INVAL("cp1 move");
8557 generate_exception_end(ctx
, EXCP_RI
);
8565 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8581 l1
= gen_new_label();
8582 t0
= tcg_temp_new_i32();
8583 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8584 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8585 tcg_temp_free_i32(t0
);
8587 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8589 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8594 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8598 TCGv_i32 t0
= tcg_temp_new_i32();
8599 TCGLabel
*l1
= gen_new_label();
8606 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8607 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8608 gen_load_fpr32(ctx
, t0
, fs
);
8609 gen_store_fpr32(ctx
, t0
, fd
);
8611 tcg_temp_free_i32(t0
);
8614 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8617 TCGv_i32 t0
= tcg_temp_new_i32();
8619 TCGLabel
*l1
= gen_new_label();
8626 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8627 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8628 tcg_temp_free_i32(t0
);
8629 fp0
= tcg_temp_new_i64();
8630 gen_load_fpr64(ctx
, fp0
, fs
);
8631 gen_store_fpr64(ctx
, fp0
, fd
);
8632 tcg_temp_free_i64(fp0
);
8636 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8640 TCGv_i32 t0
= tcg_temp_new_i32();
8641 TCGLabel
*l1
= gen_new_label();
8642 TCGLabel
*l2
= gen_new_label();
8649 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8650 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8651 gen_load_fpr32(ctx
, t0
, fs
);
8652 gen_store_fpr32(ctx
, t0
, fd
);
8655 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8656 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8657 gen_load_fpr32h(ctx
, t0
, fs
);
8658 gen_store_fpr32h(ctx
, t0
, fd
);
8659 tcg_temp_free_i32(t0
);
8663 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8666 TCGv_i32 t1
= tcg_const_i32(0);
8667 TCGv_i32 fp0
= tcg_temp_new_i32();
8668 TCGv_i32 fp1
= tcg_temp_new_i32();
8669 TCGv_i32 fp2
= tcg_temp_new_i32();
8670 gen_load_fpr32(ctx
, fp0
, fd
);
8671 gen_load_fpr32(ctx
, fp1
, ft
);
8672 gen_load_fpr32(ctx
, fp2
, fs
);
8676 tcg_gen_andi_i32(fp0
, fp0
, 1);
8677 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8680 tcg_gen_andi_i32(fp1
, fp1
, 1);
8681 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8684 tcg_gen_andi_i32(fp1
, fp1
, 1);
8685 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8688 MIPS_INVAL("gen_sel_s");
8689 generate_exception_end(ctx
, EXCP_RI
);
8693 gen_store_fpr32(ctx
, fp0
, fd
);
8694 tcg_temp_free_i32(fp2
);
8695 tcg_temp_free_i32(fp1
);
8696 tcg_temp_free_i32(fp0
);
8697 tcg_temp_free_i32(t1
);
8700 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8703 TCGv_i64 t1
= tcg_const_i64(0);
8704 TCGv_i64 fp0
= tcg_temp_new_i64();
8705 TCGv_i64 fp1
= tcg_temp_new_i64();
8706 TCGv_i64 fp2
= tcg_temp_new_i64();
8707 gen_load_fpr64(ctx
, fp0
, fd
);
8708 gen_load_fpr64(ctx
, fp1
, ft
);
8709 gen_load_fpr64(ctx
, fp2
, fs
);
8713 tcg_gen_andi_i64(fp0
, fp0
, 1);
8714 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8717 tcg_gen_andi_i64(fp1
, fp1
, 1);
8718 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8721 tcg_gen_andi_i64(fp1
, fp1
, 1);
8722 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8725 MIPS_INVAL("gen_sel_d");
8726 generate_exception_end(ctx
, EXCP_RI
);
8730 gen_store_fpr64(ctx
, fp0
, fd
);
8731 tcg_temp_free_i64(fp2
);
8732 tcg_temp_free_i64(fp1
);
8733 tcg_temp_free_i64(fp0
);
8734 tcg_temp_free_i64(t1
);
8737 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8738 int ft
, int fs
, int fd
, int cc
)
8740 uint32_t func
= ctx
->opcode
& 0x3f;
8744 TCGv_i32 fp0
= tcg_temp_new_i32();
8745 TCGv_i32 fp1
= tcg_temp_new_i32();
8747 gen_load_fpr32(ctx
, fp0
, fs
);
8748 gen_load_fpr32(ctx
, fp1
, ft
);
8749 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8750 tcg_temp_free_i32(fp1
);
8751 gen_store_fpr32(ctx
, fp0
, fd
);
8752 tcg_temp_free_i32(fp0
);
8757 TCGv_i32 fp0
= tcg_temp_new_i32();
8758 TCGv_i32 fp1
= tcg_temp_new_i32();
8760 gen_load_fpr32(ctx
, fp0
, fs
);
8761 gen_load_fpr32(ctx
, fp1
, ft
);
8762 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8763 tcg_temp_free_i32(fp1
);
8764 gen_store_fpr32(ctx
, fp0
, fd
);
8765 tcg_temp_free_i32(fp0
);
8770 TCGv_i32 fp0
= tcg_temp_new_i32();
8771 TCGv_i32 fp1
= tcg_temp_new_i32();
8773 gen_load_fpr32(ctx
, fp0
, fs
);
8774 gen_load_fpr32(ctx
, fp1
, ft
);
8775 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8776 tcg_temp_free_i32(fp1
);
8777 gen_store_fpr32(ctx
, fp0
, fd
);
8778 tcg_temp_free_i32(fp0
);
8783 TCGv_i32 fp0
= tcg_temp_new_i32();
8784 TCGv_i32 fp1
= tcg_temp_new_i32();
8786 gen_load_fpr32(ctx
, fp0
, fs
);
8787 gen_load_fpr32(ctx
, fp1
, ft
);
8788 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8789 tcg_temp_free_i32(fp1
);
8790 gen_store_fpr32(ctx
, fp0
, fd
);
8791 tcg_temp_free_i32(fp0
);
8796 TCGv_i32 fp0
= tcg_temp_new_i32();
8798 gen_load_fpr32(ctx
, fp0
, fs
);
8799 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8800 gen_store_fpr32(ctx
, fp0
, fd
);
8801 tcg_temp_free_i32(fp0
);
8806 TCGv_i32 fp0
= tcg_temp_new_i32();
8808 gen_load_fpr32(ctx
, fp0
, fs
);
8809 gen_helper_float_abs_s(fp0
, fp0
);
8810 gen_store_fpr32(ctx
, fp0
, fd
);
8811 tcg_temp_free_i32(fp0
);
8816 TCGv_i32 fp0
= tcg_temp_new_i32();
8818 gen_load_fpr32(ctx
, fp0
, fs
);
8819 gen_store_fpr32(ctx
, fp0
, fd
);
8820 tcg_temp_free_i32(fp0
);
8825 TCGv_i32 fp0
= tcg_temp_new_i32();
8827 gen_load_fpr32(ctx
, fp0
, fs
);
8828 gen_helper_float_chs_s(fp0
, fp0
);
8829 gen_store_fpr32(ctx
, fp0
, fd
);
8830 tcg_temp_free_i32(fp0
);
8834 check_cp1_64bitmode(ctx
);
8836 TCGv_i32 fp32
= tcg_temp_new_i32();
8837 TCGv_i64 fp64
= tcg_temp_new_i64();
8839 gen_load_fpr32(ctx
, fp32
, fs
);
8840 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8841 tcg_temp_free_i32(fp32
);
8842 gen_store_fpr64(ctx
, fp64
, fd
);
8843 tcg_temp_free_i64(fp64
);
8847 check_cp1_64bitmode(ctx
);
8849 TCGv_i32 fp32
= tcg_temp_new_i32();
8850 TCGv_i64 fp64
= tcg_temp_new_i64();
8852 gen_load_fpr32(ctx
, fp32
, fs
);
8853 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8854 tcg_temp_free_i32(fp32
);
8855 gen_store_fpr64(ctx
, fp64
, fd
);
8856 tcg_temp_free_i64(fp64
);
8860 check_cp1_64bitmode(ctx
);
8862 TCGv_i32 fp32
= tcg_temp_new_i32();
8863 TCGv_i64 fp64
= tcg_temp_new_i64();
8865 gen_load_fpr32(ctx
, fp32
, fs
);
8866 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8867 tcg_temp_free_i32(fp32
);
8868 gen_store_fpr64(ctx
, fp64
, fd
);
8869 tcg_temp_free_i64(fp64
);
8873 check_cp1_64bitmode(ctx
);
8875 TCGv_i32 fp32
= tcg_temp_new_i32();
8876 TCGv_i64 fp64
= tcg_temp_new_i64();
8878 gen_load_fpr32(ctx
, fp32
, fs
);
8879 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8880 tcg_temp_free_i32(fp32
);
8881 gen_store_fpr64(ctx
, fp64
, fd
);
8882 tcg_temp_free_i64(fp64
);
8887 TCGv_i32 fp0
= tcg_temp_new_i32();
8889 gen_load_fpr32(ctx
, fp0
, fs
);
8890 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8891 gen_store_fpr32(ctx
, fp0
, fd
);
8892 tcg_temp_free_i32(fp0
);
8897 TCGv_i32 fp0
= tcg_temp_new_i32();
8899 gen_load_fpr32(ctx
, fp0
, fs
);
8900 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8901 gen_store_fpr32(ctx
, fp0
, fd
);
8902 tcg_temp_free_i32(fp0
);
8907 TCGv_i32 fp0
= tcg_temp_new_i32();
8909 gen_load_fpr32(ctx
, fp0
, fs
);
8910 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8911 gen_store_fpr32(ctx
, fp0
, fd
);
8912 tcg_temp_free_i32(fp0
);
8917 TCGv_i32 fp0
= tcg_temp_new_i32();
8919 gen_load_fpr32(ctx
, fp0
, fs
);
8920 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8921 gen_store_fpr32(ctx
, fp0
, fd
);
8922 tcg_temp_free_i32(fp0
);
8926 check_insn(ctx
, ISA_MIPS32R6
);
8927 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8930 check_insn(ctx
, ISA_MIPS32R6
);
8931 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8934 check_insn(ctx
, ISA_MIPS32R6
);
8935 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8938 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8939 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
8942 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8944 TCGLabel
*l1
= gen_new_label();
8948 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
8950 fp0
= tcg_temp_new_i32();
8951 gen_load_fpr32(ctx
, fp0
, fs
);
8952 gen_store_fpr32(ctx
, fp0
, fd
);
8953 tcg_temp_free_i32(fp0
);
8958 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8960 TCGLabel
*l1
= gen_new_label();
8964 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
8965 fp0
= tcg_temp_new_i32();
8966 gen_load_fpr32(ctx
, fp0
, fs
);
8967 gen_store_fpr32(ctx
, fp0
, fd
);
8968 tcg_temp_free_i32(fp0
);
8975 TCGv_i32 fp0
= tcg_temp_new_i32();
8977 gen_load_fpr32(ctx
, fp0
, fs
);
8978 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
8979 gen_store_fpr32(ctx
, fp0
, fd
);
8980 tcg_temp_free_i32(fp0
);
8985 TCGv_i32 fp0
= tcg_temp_new_i32();
8987 gen_load_fpr32(ctx
, fp0
, fs
);
8988 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
8989 gen_store_fpr32(ctx
, fp0
, fd
);
8990 tcg_temp_free_i32(fp0
);
8994 check_insn(ctx
, ISA_MIPS32R6
);
8996 TCGv_i32 fp0
= tcg_temp_new_i32();
8997 TCGv_i32 fp1
= tcg_temp_new_i32();
8998 TCGv_i32 fp2
= tcg_temp_new_i32();
8999 gen_load_fpr32(ctx
, fp0
, fs
);
9000 gen_load_fpr32(ctx
, fp1
, ft
);
9001 gen_load_fpr32(ctx
, fp2
, fd
);
9002 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9003 gen_store_fpr32(ctx
, fp2
, fd
);
9004 tcg_temp_free_i32(fp2
);
9005 tcg_temp_free_i32(fp1
);
9006 tcg_temp_free_i32(fp0
);
9010 check_insn(ctx
, ISA_MIPS32R6
);
9012 TCGv_i32 fp0
= tcg_temp_new_i32();
9013 TCGv_i32 fp1
= tcg_temp_new_i32();
9014 TCGv_i32 fp2
= tcg_temp_new_i32();
9015 gen_load_fpr32(ctx
, fp0
, fs
);
9016 gen_load_fpr32(ctx
, fp1
, ft
);
9017 gen_load_fpr32(ctx
, fp2
, fd
);
9018 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9019 gen_store_fpr32(ctx
, fp2
, fd
);
9020 tcg_temp_free_i32(fp2
);
9021 tcg_temp_free_i32(fp1
);
9022 tcg_temp_free_i32(fp0
);
9026 check_insn(ctx
, ISA_MIPS32R6
);
9028 TCGv_i32 fp0
= tcg_temp_new_i32();
9029 gen_load_fpr32(ctx
, fp0
, fs
);
9030 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9031 gen_store_fpr32(ctx
, fp0
, fd
);
9032 tcg_temp_free_i32(fp0
);
9036 check_insn(ctx
, ISA_MIPS32R6
);
9038 TCGv_i32 fp0
= tcg_temp_new_i32();
9039 gen_load_fpr32(ctx
, fp0
, fs
);
9040 gen_helper_float_class_s(fp0
, fp0
);
9041 gen_store_fpr32(ctx
, fp0
, fd
);
9042 tcg_temp_free_i32(fp0
);
9045 case OPC_MIN_S
: /* OPC_RECIP2_S */
9046 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9048 TCGv_i32 fp0
= tcg_temp_new_i32();
9049 TCGv_i32 fp1
= tcg_temp_new_i32();
9050 TCGv_i32 fp2
= tcg_temp_new_i32();
9051 gen_load_fpr32(ctx
, fp0
, fs
);
9052 gen_load_fpr32(ctx
, fp1
, ft
);
9053 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9054 gen_store_fpr32(ctx
, fp2
, fd
);
9055 tcg_temp_free_i32(fp2
);
9056 tcg_temp_free_i32(fp1
);
9057 tcg_temp_free_i32(fp0
);
9060 check_cp1_64bitmode(ctx
);
9062 TCGv_i32 fp0
= tcg_temp_new_i32();
9063 TCGv_i32 fp1
= tcg_temp_new_i32();
9065 gen_load_fpr32(ctx
, fp0
, fs
);
9066 gen_load_fpr32(ctx
, fp1
, ft
);
9067 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9068 tcg_temp_free_i32(fp1
);
9069 gen_store_fpr32(ctx
, fp0
, fd
);
9070 tcg_temp_free_i32(fp0
);
9074 case OPC_MINA_S
: /* OPC_RECIP1_S */
9075 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9077 TCGv_i32 fp0
= tcg_temp_new_i32();
9078 TCGv_i32 fp1
= tcg_temp_new_i32();
9079 TCGv_i32 fp2
= tcg_temp_new_i32();
9080 gen_load_fpr32(ctx
, fp0
, fs
);
9081 gen_load_fpr32(ctx
, fp1
, ft
);
9082 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9083 gen_store_fpr32(ctx
, fp2
, fd
);
9084 tcg_temp_free_i32(fp2
);
9085 tcg_temp_free_i32(fp1
);
9086 tcg_temp_free_i32(fp0
);
9089 check_cp1_64bitmode(ctx
);
9091 TCGv_i32 fp0
= tcg_temp_new_i32();
9093 gen_load_fpr32(ctx
, fp0
, fs
);
9094 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9095 gen_store_fpr32(ctx
, fp0
, fd
);
9096 tcg_temp_free_i32(fp0
);
9100 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9101 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9103 TCGv_i32 fp0
= tcg_temp_new_i32();
9104 TCGv_i32 fp1
= tcg_temp_new_i32();
9105 gen_load_fpr32(ctx
, fp0
, fs
);
9106 gen_load_fpr32(ctx
, fp1
, ft
);
9107 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9108 gen_store_fpr32(ctx
, fp1
, fd
);
9109 tcg_temp_free_i32(fp1
);
9110 tcg_temp_free_i32(fp0
);
9113 check_cp1_64bitmode(ctx
);
9115 TCGv_i32 fp0
= tcg_temp_new_i32();
9117 gen_load_fpr32(ctx
, fp0
, fs
);
9118 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9119 gen_store_fpr32(ctx
, fp0
, fd
);
9120 tcg_temp_free_i32(fp0
);
9124 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9125 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9127 TCGv_i32 fp0
= tcg_temp_new_i32();
9128 TCGv_i32 fp1
= tcg_temp_new_i32();
9129 gen_load_fpr32(ctx
, fp0
, fs
);
9130 gen_load_fpr32(ctx
, fp1
, ft
);
9131 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9132 gen_store_fpr32(ctx
, fp1
, fd
);
9133 tcg_temp_free_i32(fp1
);
9134 tcg_temp_free_i32(fp0
);
9137 check_cp1_64bitmode(ctx
);
9139 TCGv_i32 fp0
= tcg_temp_new_i32();
9140 TCGv_i32 fp1
= tcg_temp_new_i32();
9142 gen_load_fpr32(ctx
, fp0
, fs
);
9143 gen_load_fpr32(ctx
, fp1
, ft
);
9144 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9145 tcg_temp_free_i32(fp1
);
9146 gen_store_fpr32(ctx
, fp0
, fd
);
9147 tcg_temp_free_i32(fp0
);
9152 check_cp1_registers(ctx
, fd
);
9154 TCGv_i32 fp32
= tcg_temp_new_i32();
9155 TCGv_i64 fp64
= tcg_temp_new_i64();
9157 gen_load_fpr32(ctx
, fp32
, fs
);
9158 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9159 tcg_temp_free_i32(fp32
);
9160 gen_store_fpr64(ctx
, fp64
, fd
);
9161 tcg_temp_free_i64(fp64
);
9166 TCGv_i32 fp0
= tcg_temp_new_i32();
9168 gen_load_fpr32(ctx
, fp0
, fs
);
9169 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9170 gen_store_fpr32(ctx
, fp0
, fd
);
9171 tcg_temp_free_i32(fp0
);
9175 check_cp1_64bitmode(ctx
);
9177 TCGv_i32 fp32
= tcg_temp_new_i32();
9178 TCGv_i64 fp64
= tcg_temp_new_i64();
9180 gen_load_fpr32(ctx
, fp32
, fs
);
9181 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9182 tcg_temp_free_i32(fp32
);
9183 gen_store_fpr64(ctx
, fp64
, fd
);
9184 tcg_temp_free_i64(fp64
);
9190 TCGv_i64 fp64
= tcg_temp_new_i64();
9191 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9192 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9194 gen_load_fpr32(ctx
, fp32_0
, fs
);
9195 gen_load_fpr32(ctx
, fp32_1
, ft
);
9196 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9197 tcg_temp_free_i32(fp32_1
);
9198 tcg_temp_free_i32(fp32_0
);
9199 gen_store_fpr64(ctx
, fp64
, fd
);
9200 tcg_temp_free_i64(fp64
);
9212 case OPC_CMP_NGLE_S
:
9219 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9220 if (ctx
->opcode
& (1 << 6)) {
9221 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9223 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9227 check_cp1_registers(ctx
, fs
| ft
| fd
);
9229 TCGv_i64 fp0
= tcg_temp_new_i64();
9230 TCGv_i64 fp1
= tcg_temp_new_i64();
9232 gen_load_fpr64(ctx
, fp0
, fs
);
9233 gen_load_fpr64(ctx
, fp1
, ft
);
9234 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9235 tcg_temp_free_i64(fp1
);
9236 gen_store_fpr64(ctx
, fp0
, fd
);
9237 tcg_temp_free_i64(fp0
);
9241 check_cp1_registers(ctx
, fs
| ft
| fd
);
9243 TCGv_i64 fp0
= tcg_temp_new_i64();
9244 TCGv_i64 fp1
= tcg_temp_new_i64();
9246 gen_load_fpr64(ctx
, fp0
, fs
);
9247 gen_load_fpr64(ctx
, fp1
, ft
);
9248 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9249 tcg_temp_free_i64(fp1
);
9250 gen_store_fpr64(ctx
, fp0
, fd
);
9251 tcg_temp_free_i64(fp0
);
9255 check_cp1_registers(ctx
, fs
| ft
| fd
);
9257 TCGv_i64 fp0
= tcg_temp_new_i64();
9258 TCGv_i64 fp1
= tcg_temp_new_i64();
9260 gen_load_fpr64(ctx
, fp0
, fs
);
9261 gen_load_fpr64(ctx
, fp1
, ft
);
9262 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9263 tcg_temp_free_i64(fp1
);
9264 gen_store_fpr64(ctx
, fp0
, fd
);
9265 tcg_temp_free_i64(fp0
);
9269 check_cp1_registers(ctx
, fs
| ft
| fd
);
9271 TCGv_i64 fp0
= tcg_temp_new_i64();
9272 TCGv_i64 fp1
= tcg_temp_new_i64();
9274 gen_load_fpr64(ctx
, fp0
, fs
);
9275 gen_load_fpr64(ctx
, fp1
, ft
);
9276 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9277 tcg_temp_free_i64(fp1
);
9278 gen_store_fpr64(ctx
, fp0
, fd
);
9279 tcg_temp_free_i64(fp0
);
9283 check_cp1_registers(ctx
, fs
| fd
);
9285 TCGv_i64 fp0
= tcg_temp_new_i64();
9287 gen_load_fpr64(ctx
, fp0
, fs
);
9288 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9289 gen_store_fpr64(ctx
, fp0
, fd
);
9290 tcg_temp_free_i64(fp0
);
9294 check_cp1_registers(ctx
, fs
| fd
);
9296 TCGv_i64 fp0
= tcg_temp_new_i64();
9298 gen_load_fpr64(ctx
, fp0
, fs
);
9299 gen_helper_float_abs_d(fp0
, fp0
);
9300 gen_store_fpr64(ctx
, fp0
, fd
);
9301 tcg_temp_free_i64(fp0
);
9305 check_cp1_registers(ctx
, fs
| fd
);
9307 TCGv_i64 fp0
= tcg_temp_new_i64();
9309 gen_load_fpr64(ctx
, fp0
, fs
);
9310 gen_store_fpr64(ctx
, fp0
, fd
);
9311 tcg_temp_free_i64(fp0
);
9315 check_cp1_registers(ctx
, fs
| fd
);
9317 TCGv_i64 fp0
= tcg_temp_new_i64();
9319 gen_load_fpr64(ctx
, fp0
, fs
);
9320 gen_helper_float_chs_d(fp0
, fp0
);
9321 gen_store_fpr64(ctx
, fp0
, fd
);
9322 tcg_temp_free_i64(fp0
);
9326 check_cp1_64bitmode(ctx
);
9328 TCGv_i64 fp0
= tcg_temp_new_i64();
9330 gen_load_fpr64(ctx
, fp0
, fs
);
9331 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9332 gen_store_fpr64(ctx
, fp0
, fd
);
9333 tcg_temp_free_i64(fp0
);
9337 check_cp1_64bitmode(ctx
);
9339 TCGv_i64 fp0
= tcg_temp_new_i64();
9341 gen_load_fpr64(ctx
, fp0
, fs
);
9342 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9343 gen_store_fpr64(ctx
, fp0
, fd
);
9344 tcg_temp_free_i64(fp0
);
9348 check_cp1_64bitmode(ctx
);
9350 TCGv_i64 fp0
= tcg_temp_new_i64();
9352 gen_load_fpr64(ctx
, fp0
, fs
);
9353 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9354 gen_store_fpr64(ctx
, fp0
, fd
);
9355 tcg_temp_free_i64(fp0
);
9359 check_cp1_64bitmode(ctx
);
9361 TCGv_i64 fp0
= tcg_temp_new_i64();
9363 gen_load_fpr64(ctx
, fp0
, fs
);
9364 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9365 gen_store_fpr64(ctx
, fp0
, fd
);
9366 tcg_temp_free_i64(fp0
);
9370 check_cp1_registers(ctx
, fs
);
9372 TCGv_i32 fp32
= tcg_temp_new_i32();
9373 TCGv_i64 fp64
= tcg_temp_new_i64();
9375 gen_load_fpr64(ctx
, fp64
, fs
);
9376 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9377 tcg_temp_free_i64(fp64
);
9378 gen_store_fpr32(ctx
, fp32
, fd
);
9379 tcg_temp_free_i32(fp32
);
9383 check_cp1_registers(ctx
, fs
);
9385 TCGv_i32 fp32
= tcg_temp_new_i32();
9386 TCGv_i64 fp64
= tcg_temp_new_i64();
9388 gen_load_fpr64(ctx
, fp64
, fs
);
9389 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9390 tcg_temp_free_i64(fp64
);
9391 gen_store_fpr32(ctx
, fp32
, fd
);
9392 tcg_temp_free_i32(fp32
);
9396 check_cp1_registers(ctx
, fs
);
9398 TCGv_i32 fp32
= tcg_temp_new_i32();
9399 TCGv_i64 fp64
= tcg_temp_new_i64();
9401 gen_load_fpr64(ctx
, fp64
, fs
);
9402 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9403 tcg_temp_free_i64(fp64
);
9404 gen_store_fpr32(ctx
, fp32
, fd
);
9405 tcg_temp_free_i32(fp32
);
9409 check_cp1_registers(ctx
, fs
);
9411 TCGv_i32 fp32
= tcg_temp_new_i32();
9412 TCGv_i64 fp64
= tcg_temp_new_i64();
9414 gen_load_fpr64(ctx
, fp64
, fs
);
9415 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9416 tcg_temp_free_i64(fp64
);
9417 gen_store_fpr32(ctx
, fp32
, fd
);
9418 tcg_temp_free_i32(fp32
);
9422 check_insn(ctx
, ISA_MIPS32R6
);
9423 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9426 check_insn(ctx
, ISA_MIPS32R6
);
9427 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9430 check_insn(ctx
, ISA_MIPS32R6
);
9431 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9434 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9435 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9438 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9440 TCGLabel
*l1
= gen_new_label();
9444 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9446 fp0
= tcg_temp_new_i64();
9447 gen_load_fpr64(ctx
, fp0
, fs
);
9448 gen_store_fpr64(ctx
, fp0
, fd
);
9449 tcg_temp_free_i64(fp0
);
9454 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9456 TCGLabel
*l1
= gen_new_label();
9460 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9461 fp0
= tcg_temp_new_i64();
9462 gen_load_fpr64(ctx
, fp0
, fs
);
9463 gen_store_fpr64(ctx
, fp0
, fd
);
9464 tcg_temp_free_i64(fp0
);
9470 check_cp1_registers(ctx
, fs
| fd
);
9472 TCGv_i64 fp0
= tcg_temp_new_i64();
9474 gen_load_fpr64(ctx
, fp0
, fs
);
9475 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9476 gen_store_fpr64(ctx
, fp0
, fd
);
9477 tcg_temp_free_i64(fp0
);
9481 check_cp1_registers(ctx
, fs
| fd
);
9483 TCGv_i64 fp0
= tcg_temp_new_i64();
9485 gen_load_fpr64(ctx
, fp0
, fs
);
9486 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9487 gen_store_fpr64(ctx
, fp0
, fd
);
9488 tcg_temp_free_i64(fp0
);
9492 check_insn(ctx
, ISA_MIPS32R6
);
9494 TCGv_i64 fp0
= tcg_temp_new_i64();
9495 TCGv_i64 fp1
= tcg_temp_new_i64();
9496 TCGv_i64 fp2
= tcg_temp_new_i64();
9497 gen_load_fpr64(ctx
, fp0
, fs
);
9498 gen_load_fpr64(ctx
, fp1
, ft
);
9499 gen_load_fpr64(ctx
, fp2
, fd
);
9500 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9501 gen_store_fpr64(ctx
, fp2
, fd
);
9502 tcg_temp_free_i64(fp2
);
9503 tcg_temp_free_i64(fp1
);
9504 tcg_temp_free_i64(fp0
);
9508 check_insn(ctx
, ISA_MIPS32R6
);
9510 TCGv_i64 fp0
= tcg_temp_new_i64();
9511 TCGv_i64 fp1
= tcg_temp_new_i64();
9512 TCGv_i64 fp2
= tcg_temp_new_i64();
9513 gen_load_fpr64(ctx
, fp0
, fs
);
9514 gen_load_fpr64(ctx
, fp1
, ft
);
9515 gen_load_fpr64(ctx
, fp2
, fd
);
9516 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9517 gen_store_fpr64(ctx
, fp2
, fd
);
9518 tcg_temp_free_i64(fp2
);
9519 tcg_temp_free_i64(fp1
);
9520 tcg_temp_free_i64(fp0
);
9524 check_insn(ctx
, ISA_MIPS32R6
);
9526 TCGv_i64 fp0
= tcg_temp_new_i64();
9527 gen_load_fpr64(ctx
, fp0
, fs
);
9528 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9529 gen_store_fpr64(ctx
, fp0
, fd
);
9530 tcg_temp_free_i64(fp0
);
9534 check_insn(ctx
, ISA_MIPS32R6
);
9536 TCGv_i64 fp0
= tcg_temp_new_i64();
9537 gen_load_fpr64(ctx
, fp0
, fs
);
9538 gen_helper_float_class_d(fp0
, fp0
);
9539 gen_store_fpr64(ctx
, fp0
, fd
);
9540 tcg_temp_free_i64(fp0
);
9543 case OPC_MIN_D
: /* OPC_RECIP2_D */
9544 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9546 TCGv_i64 fp0
= tcg_temp_new_i64();
9547 TCGv_i64 fp1
= tcg_temp_new_i64();
9548 gen_load_fpr64(ctx
, fp0
, fs
);
9549 gen_load_fpr64(ctx
, fp1
, ft
);
9550 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9551 gen_store_fpr64(ctx
, fp1
, fd
);
9552 tcg_temp_free_i64(fp1
);
9553 tcg_temp_free_i64(fp0
);
9556 check_cp1_64bitmode(ctx
);
9558 TCGv_i64 fp0
= tcg_temp_new_i64();
9559 TCGv_i64 fp1
= tcg_temp_new_i64();
9561 gen_load_fpr64(ctx
, fp0
, fs
);
9562 gen_load_fpr64(ctx
, fp1
, ft
);
9563 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9564 tcg_temp_free_i64(fp1
);
9565 gen_store_fpr64(ctx
, fp0
, fd
);
9566 tcg_temp_free_i64(fp0
);
9570 case OPC_MINA_D
: /* OPC_RECIP1_D */
9571 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9573 TCGv_i64 fp0
= tcg_temp_new_i64();
9574 TCGv_i64 fp1
= tcg_temp_new_i64();
9575 gen_load_fpr64(ctx
, fp0
, fs
);
9576 gen_load_fpr64(ctx
, fp1
, ft
);
9577 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9578 gen_store_fpr64(ctx
, fp1
, fd
);
9579 tcg_temp_free_i64(fp1
);
9580 tcg_temp_free_i64(fp0
);
9583 check_cp1_64bitmode(ctx
);
9585 TCGv_i64 fp0
= tcg_temp_new_i64();
9587 gen_load_fpr64(ctx
, fp0
, fs
);
9588 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9589 gen_store_fpr64(ctx
, fp0
, fd
);
9590 tcg_temp_free_i64(fp0
);
9594 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9595 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9597 TCGv_i64 fp0
= tcg_temp_new_i64();
9598 TCGv_i64 fp1
= tcg_temp_new_i64();
9599 gen_load_fpr64(ctx
, fp0
, fs
);
9600 gen_load_fpr64(ctx
, fp1
, ft
);
9601 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9602 gen_store_fpr64(ctx
, fp1
, fd
);
9603 tcg_temp_free_i64(fp1
);
9604 tcg_temp_free_i64(fp0
);
9607 check_cp1_64bitmode(ctx
);
9609 TCGv_i64 fp0
= tcg_temp_new_i64();
9611 gen_load_fpr64(ctx
, fp0
, fs
);
9612 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9613 gen_store_fpr64(ctx
, fp0
, fd
);
9614 tcg_temp_free_i64(fp0
);
9618 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9619 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9621 TCGv_i64 fp0
= tcg_temp_new_i64();
9622 TCGv_i64 fp1
= tcg_temp_new_i64();
9623 gen_load_fpr64(ctx
, fp0
, fs
);
9624 gen_load_fpr64(ctx
, fp1
, ft
);
9625 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9626 gen_store_fpr64(ctx
, fp1
, fd
);
9627 tcg_temp_free_i64(fp1
);
9628 tcg_temp_free_i64(fp0
);
9631 check_cp1_64bitmode(ctx
);
9633 TCGv_i64 fp0
= tcg_temp_new_i64();
9634 TCGv_i64 fp1
= tcg_temp_new_i64();
9636 gen_load_fpr64(ctx
, fp0
, fs
);
9637 gen_load_fpr64(ctx
, fp1
, ft
);
9638 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9639 tcg_temp_free_i64(fp1
);
9640 gen_store_fpr64(ctx
, fp0
, fd
);
9641 tcg_temp_free_i64(fp0
);
9654 case OPC_CMP_NGLE_D
:
9661 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9662 if (ctx
->opcode
& (1 << 6)) {
9663 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9665 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9669 check_cp1_registers(ctx
, fs
);
9671 TCGv_i32 fp32
= tcg_temp_new_i32();
9672 TCGv_i64 fp64
= tcg_temp_new_i64();
9674 gen_load_fpr64(ctx
, fp64
, fs
);
9675 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9676 tcg_temp_free_i64(fp64
);
9677 gen_store_fpr32(ctx
, fp32
, fd
);
9678 tcg_temp_free_i32(fp32
);
9682 check_cp1_registers(ctx
, fs
);
9684 TCGv_i32 fp32
= tcg_temp_new_i32();
9685 TCGv_i64 fp64
= tcg_temp_new_i64();
9687 gen_load_fpr64(ctx
, fp64
, fs
);
9688 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9689 tcg_temp_free_i64(fp64
);
9690 gen_store_fpr32(ctx
, fp32
, fd
);
9691 tcg_temp_free_i32(fp32
);
9695 check_cp1_64bitmode(ctx
);
9697 TCGv_i64 fp0
= tcg_temp_new_i64();
9699 gen_load_fpr64(ctx
, fp0
, fs
);
9700 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9701 gen_store_fpr64(ctx
, fp0
, fd
);
9702 tcg_temp_free_i64(fp0
);
9707 TCGv_i32 fp0
= tcg_temp_new_i32();
9709 gen_load_fpr32(ctx
, fp0
, fs
);
9710 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9711 gen_store_fpr32(ctx
, fp0
, fd
);
9712 tcg_temp_free_i32(fp0
);
9716 check_cp1_registers(ctx
, fd
);
9718 TCGv_i32 fp32
= tcg_temp_new_i32();
9719 TCGv_i64 fp64
= tcg_temp_new_i64();
9721 gen_load_fpr32(ctx
, fp32
, fs
);
9722 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9723 tcg_temp_free_i32(fp32
);
9724 gen_store_fpr64(ctx
, fp64
, fd
);
9725 tcg_temp_free_i64(fp64
);
9729 check_cp1_64bitmode(ctx
);
9731 TCGv_i32 fp32
= tcg_temp_new_i32();
9732 TCGv_i64 fp64
= tcg_temp_new_i64();
9734 gen_load_fpr64(ctx
, fp64
, fs
);
9735 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9736 tcg_temp_free_i64(fp64
);
9737 gen_store_fpr32(ctx
, fp32
, fd
);
9738 tcg_temp_free_i32(fp32
);
9742 check_cp1_64bitmode(ctx
);
9744 TCGv_i64 fp0
= tcg_temp_new_i64();
9746 gen_load_fpr64(ctx
, fp0
, fs
);
9747 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9748 gen_store_fpr64(ctx
, fp0
, fd
);
9749 tcg_temp_free_i64(fp0
);
9755 TCGv_i64 fp0
= tcg_temp_new_i64();
9757 gen_load_fpr64(ctx
, fp0
, fs
);
9758 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9759 gen_store_fpr64(ctx
, fp0
, fd
);
9760 tcg_temp_free_i64(fp0
);
9766 TCGv_i64 fp0
= tcg_temp_new_i64();
9767 TCGv_i64 fp1
= tcg_temp_new_i64();
9769 gen_load_fpr64(ctx
, fp0
, fs
);
9770 gen_load_fpr64(ctx
, fp1
, ft
);
9771 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9772 tcg_temp_free_i64(fp1
);
9773 gen_store_fpr64(ctx
, fp0
, fd
);
9774 tcg_temp_free_i64(fp0
);
9780 TCGv_i64 fp0
= tcg_temp_new_i64();
9781 TCGv_i64 fp1
= tcg_temp_new_i64();
9783 gen_load_fpr64(ctx
, fp0
, fs
);
9784 gen_load_fpr64(ctx
, fp1
, ft
);
9785 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9786 tcg_temp_free_i64(fp1
);
9787 gen_store_fpr64(ctx
, fp0
, fd
);
9788 tcg_temp_free_i64(fp0
);
9794 TCGv_i64 fp0
= tcg_temp_new_i64();
9795 TCGv_i64 fp1
= tcg_temp_new_i64();
9797 gen_load_fpr64(ctx
, fp0
, fs
);
9798 gen_load_fpr64(ctx
, fp1
, ft
);
9799 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9800 tcg_temp_free_i64(fp1
);
9801 gen_store_fpr64(ctx
, fp0
, fd
);
9802 tcg_temp_free_i64(fp0
);
9808 TCGv_i64 fp0
= tcg_temp_new_i64();
9810 gen_load_fpr64(ctx
, fp0
, fs
);
9811 gen_helper_float_abs_ps(fp0
, fp0
);
9812 gen_store_fpr64(ctx
, fp0
, fd
);
9813 tcg_temp_free_i64(fp0
);
9819 TCGv_i64 fp0
= tcg_temp_new_i64();
9821 gen_load_fpr64(ctx
, fp0
, fs
);
9822 gen_store_fpr64(ctx
, fp0
, fd
);
9823 tcg_temp_free_i64(fp0
);
9829 TCGv_i64 fp0
= tcg_temp_new_i64();
9831 gen_load_fpr64(ctx
, fp0
, fs
);
9832 gen_helper_float_chs_ps(fp0
, fp0
);
9833 gen_store_fpr64(ctx
, fp0
, fd
);
9834 tcg_temp_free_i64(fp0
);
9839 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9844 TCGLabel
*l1
= gen_new_label();
9848 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9849 fp0
= tcg_temp_new_i64();
9850 gen_load_fpr64(ctx
, fp0
, fs
);
9851 gen_store_fpr64(ctx
, fp0
, fd
);
9852 tcg_temp_free_i64(fp0
);
9859 TCGLabel
*l1
= gen_new_label();
9863 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9864 fp0
= tcg_temp_new_i64();
9865 gen_load_fpr64(ctx
, fp0
, fs
);
9866 gen_store_fpr64(ctx
, fp0
, fd
);
9867 tcg_temp_free_i64(fp0
);
9875 TCGv_i64 fp0
= tcg_temp_new_i64();
9876 TCGv_i64 fp1
= tcg_temp_new_i64();
9878 gen_load_fpr64(ctx
, fp0
, ft
);
9879 gen_load_fpr64(ctx
, fp1
, fs
);
9880 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9881 tcg_temp_free_i64(fp1
);
9882 gen_store_fpr64(ctx
, fp0
, fd
);
9883 tcg_temp_free_i64(fp0
);
9889 TCGv_i64 fp0
= tcg_temp_new_i64();
9890 TCGv_i64 fp1
= tcg_temp_new_i64();
9892 gen_load_fpr64(ctx
, fp0
, ft
);
9893 gen_load_fpr64(ctx
, fp1
, fs
);
9894 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9895 tcg_temp_free_i64(fp1
);
9896 gen_store_fpr64(ctx
, fp0
, fd
);
9897 tcg_temp_free_i64(fp0
);
9903 TCGv_i64 fp0
= tcg_temp_new_i64();
9904 TCGv_i64 fp1
= tcg_temp_new_i64();
9906 gen_load_fpr64(ctx
, fp0
, fs
);
9907 gen_load_fpr64(ctx
, fp1
, ft
);
9908 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9909 tcg_temp_free_i64(fp1
);
9910 gen_store_fpr64(ctx
, fp0
, fd
);
9911 tcg_temp_free_i64(fp0
);
9917 TCGv_i64 fp0
= tcg_temp_new_i64();
9919 gen_load_fpr64(ctx
, fp0
, fs
);
9920 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9921 gen_store_fpr64(ctx
, fp0
, fd
);
9922 tcg_temp_free_i64(fp0
);
9928 TCGv_i64 fp0
= tcg_temp_new_i64();
9930 gen_load_fpr64(ctx
, fp0
, fs
);
9931 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
9932 gen_store_fpr64(ctx
, fp0
, fd
);
9933 tcg_temp_free_i64(fp0
);
9939 TCGv_i64 fp0
= tcg_temp_new_i64();
9940 TCGv_i64 fp1
= tcg_temp_new_i64();
9942 gen_load_fpr64(ctx
, fp0
, fs
);
9943 gen_load_fpr64(ctx
, fp1
, ft
);
9944 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
9945 tcg_temp_free_i64(fp1
);
9946 gen_store_fpr64(ctx
, fp0
, fd
);
9947 tcg_temp_free_i64(fp0
);
9951 check_cp1_64bitmode(ctx
);
9953 TCGv_i32 fp0
= tcg_temp_new_i32();
9955 gen_load_fpr32h(ctx
, fp0
, fs
);
9956 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
9957 gen_store_fpr32(ctx
, fp0
, fd
);
9958 tcg_temp_free_i32(fp0
);
9964 TCGv_i64 fp0
= tcg_temp_new_i64();
9966 gen_load_fpr64(ctx
, fp0
, fs
);
9967 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
9968 gen_store_fpr64(ctx
, fp0
, fd
);
9969 tcg_temp_free_i64(fp0
);
9973 check_cp1_64bitmode(ctx
);
9975 TCGv_i32 fp0
= tcg_temp_new_i32();
9977 gen_load_fpr32(ctx
, fp0
, fs
);
9978 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
9979 gen_store_fpr32(ctx
, fp0
, fd
);
9980 tcg_temp_free_i32(fp0
);
9986 TCGv_i32 fp0
= tcg_temp_new_i32();
9987 TCGv_i32 fp1
= tcg_temp_new_i32();
9989 gen_load_fpr32(ctx
, fp0
, fs
);
9990 gen_load_fpr32(ctx
, fp1
, ft
);
9991 gen_store_fpr32h(ctx
, fp0
, fd
);
9992 gen_store_fpr32(ctx
, fp1
, fd
);
9993 tcg_temp_free_i32(fp0
);
9994 tcg_temp_free_i32(fp1
);
10000 TCGv_i32 fp0
= tcg_temp_new_i32();
10001 TCGv_i32 fp1
= tcg_temp_new_i32();
10003 gen_load_fpr32(ctx
, fp0
, fs
);
10004 gen_load_fpr32h(ctx
, fp1
, ft
);
10005 gen_store_fpr32(ctx
, fp1
, fd
);
10006 gen_store_fpr32h(ctx
, fp0
, fd
);
10007 tcg_temp_free_i32(fp0
);
10008 tcg_temp_free_i32(fp1
);
10014 TCGv_i32 fp0
= tcg_temp_new_i32();
10015 TCGv_i32 fp1
= tcg_temp_new_i32();
10017 gen_load_fpr32h(ctx
, fp0
, fs
);
10018 gen_load_fpr32(ctx
, fp1
, ft
);
10019 gen_store_fpr32(ctx
, fp1
, fd
);
10020 gen_store_fpr32h(ctx
, fp0
, fd
);
10021 tcg_temp_free_i32(fp0
);
10022 tcg_temp_free_i32(fp1
);
10028 TCGv_i32 fp0
= tcg_temp_new_i32();
10029 TCGv_i32 fp1
= tcg_temp_new_i32();
10031 gen_load_fpr32h(ctx
, fp0
, fs
);
10032 gen_load_fpr32h(ctx
, fp1
, ft
);
10033 gen_store_fpr32(ctx
, fp1
, fd
);
10034 gen_store_fpr32h(ctx
, fp0
, fd
);
10035 tcg_temp_free_i32(fp0
);
10036 tcg_temp_free_i32(fp1
);
10040 case OPC_CMP_UN_PS
:
10041 case OPC_CMP_EQ_PS
:
10042 case OPC_CMP_UEQ_PS
:
10043 case OPC_CMP_OLT_PS
:
10044 case OPC_CMP_ULT_PS
:
10045 case OPC_CMP_OLE_PS
:
10046 case OPC_CMP_ULE_PS
:
10047 case OPC_CMP_SF_PS
:
10048 case OPC_CMP_NGLE_PS
:
10049 case OPC_CMP_SEQ_PS
:
10050 case OPC_CMP_NGL_PS
:
10051 case OPC_CMP_LT_PS
:
10052 case OPC_CMP_NGE_PS
:
10053 case OPC_CMP_LE_PS
:
10054 case OPC_CMP_NGT_PS
:
10055 if (ctx
->opcode
& (1 << 6)) {
10056 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10058 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10062 MIPS_INVAL("farith");
10063 generate_exception_end(ctx
, EXCP_RI
);
10068 /* Coprocessor 3 (FPU) */
10069 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10070 int fd
, int fs
, int base
, int index
)
10072 TCGv t0
= tcg_temp_new();
10075 gen_load_gpr(t0
, index
);
10076 } else if (index
== 0) {
10077 gen_load_gpr(t0
, base
);
10079 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10081 /* Don't do NOP if destination is zero: we must perform the actual
10087 TCGv_i32 fp0
= tcg_temp_new_i32();
10089 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10090 tcg_gen_trunc_tl_i32(fp0
, t0
);
10091 gen_store_fpr32(ctx
, fp0
, fd
);
10092 tcg_temp_free_i32(fp0
);
10097 check_cp1_registers(ctx
, fd
);
10099 TCGv_i64 fp0
= tcg_temp_new_i64();
10100 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10101 gen_store_fpr64(ctx
, fp0
, fd
);
10102 tcg_temp_free_i64(fp0
);
10106 check_cp1_64bitmode(ctx
);
10107 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10109 TCGv_i64 fp0
= tcg_temp_new_i64();
10111 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10112 gen_store_fpr64(ctx
, fp0
, fd
);
10113 tcg_temp_free_i64(fp0
);
10119 TCGv_i32 fp0
= tcg_temp_new_i32();
10120 gen_load_fpr32(ctx
, fp0
, fs
);
10121 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10122 tcg_temp_free_i32(fp0
);
10127 check_cp1_registers(ctx
, fs
);
10129 TCGv_i64 fp0
= tcg_temp_new_i64();
10130 gen_load_fpr64(ctx
, fp0
, fs
);
10131 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10132 tcg_temp_free_i64(fp0
);
10136 check_cp1_64bitmode(ctx
);
10137 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10139 TCGv_i64 fp0
= tcg_temp_new_i64();
10140 gen_load_fpr64(ctx
, fp0
, fs
);
10141 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10142 tcg_temp_free_i64(fp0
);
10149 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10150 int fd
, int fr
, int fs
, int ft
)
10156 TCGv t0
= tcg_temp_local_new();
10157 TCGv_i32 fp
= tcg_temp_new_i32();
10158 TCGv_i32 fph
= tcg_temp_new_i32();
10159 TCGLabel
*l1
= gen_new_label();
10160 TCGLabel
*l2
= gen_new_label();
10162 gen_load_gpr(t0
, fr
);
10163 tcg_gen_andi_tl(t0
, t0
, 0x7);
10165 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10166 gen_load_fpr32(ctx
, fp
, fs
);
10167 gen_load_fpr32h(ctx
, fph
, fs
);
10168 gen_store_fpr32(ctx
, fp
, fd
);
10169 gen_store_fpr32h(ctx
, fph
, fd
);
10172 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10174 #ifdef TARGET_WORDS_BIGENDIAN
10175 gen_load_fpr32(ctx
, fp
, fs
);
10176 gen_load_fpr32h(ctx
, fph
, ft
);
10177 gen_store_fpr32h(ctx
, fp
, fd
);
10178 gen_store_fpr32(ctx
, fph
, fd
);
10180 gen_load_fpr32h(ctx
, fph
, fs
);
10181 gen_load_fpr32(ctx
, fp
, ft
);
10182 gen_store_fpr32(ctx
, fph
, fd
);
10183 gen_store_fpr32h(ctx
, fp
, fd
);
10186 tcg_temp_free_i32(fp
);
10187 tcg_temp_free_i32(fph
);
10193 TCGv_i32 fp0
= tcg_temp_new_i32();
10194 TCGv_i32 fp1
= tcg_temp_new_i32();
10195 TCGv_i32 fp2
= tcg_temp_new_i32();
10197 gen_load_fpr32(ctx
, fp0
, fs
);
10198 gen_load_fpr32(ctx
, fp1
, ft
);
10199 gen_load_fpr32(ctx
, fp2
, fr
);
10200 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10201 tcg_temp_free_i32(fp0
);
10202 tcg_temp_free_i32(fp1
);
10203 gen_store_fpr32(ctx
, fp2
, fd
);
10204 tcg_temp_free_i32(fp2
);
10209 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10211 TCGv_i64 fp0
= tcg_temp_new_i64();
10212 TCGv_i64 fp1
= tcg_temp_new_i64();
10213 TCGv_i64 fp2
= tcg_temp_new_i64();
10215 gen_load_fpr64(ctx
, fp0
, fs
);
10216 gen_load_fpr64(ctx
, fp1
, ft
);
10217 gen_load_fpr64(ctx
, fp2
, fr
);
10218 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10219 tcg_temp_free_i64(fp0
);
10220 tcg_temp_free_i64(fp1
);
10221 gen_store_fpr64(ctx
, fp2
, fd
);
10222 tcg_temp_free_i64(fp2
);
10228 TCGv_i64 fp0
= tcg_temp_new_i64();
10229 TCGv_i64 fp1
= tcg_temp_new_i64();
10230 TCGv_i64 fp2
= tcg_temp_new_i64();
10232 gen_load_fpr64(ctx
, fp0
, fs
);
10233 gen_load_fpr64(ctx
, fp1
, ft
);
10234 gen_load_fpr64(ctx
, fp2
, fr
);
10235 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10236 tcg_temp_free_i64(fp0
);
10237 tcg_temp_free_i64(fp1
);
10238 gen_store_fpr64(ctx
, fp2
, fd
);
10239 tcg_temp_free_i64(fp2
);
10245 TCGv_i32 fp0
= tcg_temp_new_i32();
10246 TCGv_i32 fp1
= tcg_temp_new_i32();
10247 TCGv_i32 fp2
= tcg_temp_new_i32();
10249 gen_load_fpr32(ctx
, fp0
, fs
);
10250 gen_load_fpr32(ctx
, fp1
, ft
);
10251 gen_load_fpr32(ctx
, fp2
, fr
);
10252 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10253 tcg_temp_free_i32(fp0
);
10254 tcg_temp_free_i32(fp1
);
10255 gen_store_fpr32(ctx
, fp2
, fd
);
10256 tcg_temp_free_i32(fp2
);
10261 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10263 TCGv_i64 fp0
= tcg_temp_new_i64();
10264 TCGv_i64 fp1
= tcg_temp_new_i64();
10265 TCGv_i64 fp2
= tcg_temp_new_i64();
10267 gen_load_fpr64(ctx
, fp0
, fs
);
10268 gen_load_fpr64(ctx
, fp1
, ft
);
10269 gen_load_fpr64(ctx
, fp2
, fr
);
10270 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10271 tcg_temp_free_i64(fp0
);
10272 tcg_temp_free_i64(fp1
);
10273 gen_store_fpr64(ctx
, fp2
, fd
);
10274 tcg_temp_free_i64(fp2
);
10280 TCGv_i64 fp0
= tcg_temp_new_i64();
10281 TCGv_i64 fp1
= tcg_temp_new_i64();
10282 TCGv_i64 fp2
= tcg_temp_new_i64();
10284 gen_load_fpr64(ctx
, fp0
, fs
);
10285 gen_load_fpr64(ctx
, fp1
, ft
);
10286 gen_load_fpr64(ctx
, fp2
, fr
);
10287 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10288 tcg_temp_free_i64(fp0
);
10289 tcg_temp_free_i64(fp1
);
10290 gen_store_fpr64(ctx
, fp2
, fd
);
10291 tcg_temp_free_i64(fp2
);
10297 TCGv_i32 fp0
= tcg_temp_new_i32();
10298 TCGv_i32 fp1
= tcg_temp_new_i32();
10299 TCGv_i32 fp2
= tcg_temp_new_i32();
10301 gen_load_fpr32(ctx
, fp0
, fs
);
10302 gen_load_fpr32(ctx
, fp1
, ft
);
10303 gen_load_fpr32(ctx
, fp2
, fr
);
10304 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10305 tcg_temp_free_i32(fp0
);
10306 tcg_temp_free_i32(fp1
);
10307 gen_store_fpr32(ctx
, fp2
, fd
);
10308 tcg_temp_free_i32(fp2
);
10313 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10315 TCGv_i64 fp0
= tcg_temp_new_i64();
10316 TCGv_i64 fp1
= tcg_temp_new_i64();
10317 TCGv_i64 fp2
= tcg_temp_new_i64();
10319 gen_load_fpr64(ctx
, fp0
, fs
);
10320 gen_load_fpr64(ctx
, fp1
, ft
);
10321 gen_load_fpr64(ctx
, fp2
, fr
);
10322 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10323 tcg_temp_free_i64(fp0
);
10324 tcg_temp_free_i64(fp1
);
10325 gen_store_fpr64(ctx
, fp2
, fd
);
10326 tcg_temp_free_i64(fp2
);
10332 TCGv_i64 fp0
= tcg_temp_new_i64();
10333 TCGv_i64 fp1
= tcg_temp_new_i64();
10334 TCGv_i64 fp2
= tcg_temp_new_i64();
10336 gen_load_fpr64(ctx
, fp0
, fs
);
10337 gen_load_fpr64(ctx
, fp1
, ft
);
10338 gen_load_fpr64(ctx
, fp2
, fr
);
10339 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10340 tcg_temp_free_i64(fp0
);
10341 tcg_temp_free_i64(fp1
);
10342 gen_store_fpr64(ctx
, fp2
, fd
);
10343 tcg_temp_free_i64(fp2
);
10349 TCGv_i32 fp0
= tcg_temp_new_i32();
10350 TCGv_i32 fp1
= tcg_temp_new_i32();
10351 TCGv_i32 fp2
= tcg_temp_new_i32();
10353 gen_load_fpr32(ctx
, fp0
, fs
);
10354 gen_load_fpr32(ctx
, fp1
, ft
);
10355 gen_load_fpr32(ctx
, fp2
, fr
);
10356 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10357 tcg_temp_free_i32(fp0
);
10358 tcg_temp_free_i32(fp1
);
10359 gen_store_fpr32(ctx
, fp2
, fd
);
10360 tcg_temp_free_i32(fp2
);
10365 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10367 TCGv_i64 fp0
= tcg_temp_new_i64();
10368 TCGv_i64 fp1
= tcg_temp_new_i64();
10369 TCGv_i64 fp2
= tcg_temp_new_i64();
10371 gen_load_fpr64(ctx
, fp0
, fs
);
10372 gen_load_fpr64(ctx
, fp1
, ft
);
10373 gen_load_fpr64(ctx
, fp2
, fr
);
10374 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10375 tcg_temp_free_i64(fp0
);
10376 tcg_temp_free_i64(fp1
);
10377 gen_store_fpr64(ctx
, fp2
, fd
);
10378 tcg_temp_free_i64(fp2
);
10384 TCGv_i64 fp0
= tcg_temp_new_i64();
10385 TCGv_i64 fp1
= tcg_temp_new_i64();
10386 TCGv_i64 fp2
= tcg_temp_new_i64();
10388 gen_load_fpr64(ctx
, fp0
, fs
);
10389 gen_load_fpr64(ctx
, fp1
, ft
);
10390 gen_load_fpr64(ctx
, fp2
, fr
);
10391 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10392 tcg_temp_free_i64(fp0
);
10393 tcg_temp_free_i64(fp1
);
10394 gen_store_fpr64(ctx
, fp2
, fd
);
10395 tcg_temp_free_i64(fp2
);
10399 MIPS_INVAL("flt3_arith");
10400 generate_exception_end(ctx
, EXCP_RI
);
10405 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10409 #if !defined(CONFIG_USER_ONLY)
10410 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10411 Therefore only check the ISA in system mode. */
10412 check_insn(ctx
, ISA_MIPS32R2
);
10414 t0
= tcg_temp_new();
10418 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10419 gen_store_gpr(t0
, rt
);
10422 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10423 gen_store_gpr(t0
, rt
);
10426 gen_helper_rdhwr_cc(t0
, cpu_env
);
10427 gen_store_gpr(t0
, rt
);
10430 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10431 gen_store_gpr(t0
, rt
);
10434 check_insn(ctx
, ISA_MIPS32R6
);
10436 /* Performance counter registers are not implemented other than
10437 * control register 0.
10439 generate_exception(ctx
, EXCP_RI
);
10441 gen_helper_rdhwr_performance(t0
, cpu_env
);
10442 gen_store_gpr(t0
, rt
);
10445 check_insn(ctx
, ISA_MIPS32R6
);
10446 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10447 gen_store_gpr(t0
, rt
);
10450 #if defined(CONFIG_USER_ONLY)
10451 tcg_gen_ld_tl(t0
, cpu_env
,
10452 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10453 gen_store_gpr(t0
, rt
);
10456 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10457 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10458 tcg_gen_ld_tl(t0
, cpu_env
,
10459 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10460 gen_store_gpr(t0
, rt
);
10462 generate_exception_end(ctx
, EXCP_RI
);
10466 default: /* Invalid */
10467 MIPS_INVAL("rdhwr");
10468 generate_exception_end(ctx
, EXCP_RI
);
10474 static inline void clear_branch_hflags(DisasContext
*ctx
)
10476 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10477 if (ctx
->bstate
== BS_NONE
) {
10478 save_cpu_state(ctx
, 0);
10480 /* it is not safe to save ctx->hflags as hflags may be changed
10481 in execution time by the instruction in delay / forbidden slot. */
10482 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10486 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10488 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10489 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10490 /* Branches completion */
10491 clear_branch_hflags(ctx
);
10492 ctx
->bstate
= BS_BRANCH
;
10493 /* FIXME: Need to clear can_do_io. */
10494 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10495 case MIPS_HFLAG_FBNSLOT
:
10496 gen_goto_tb(ctx
, 0, ctx
->pc
+ insn_bytes
);
10499 /* unconditional branch */
10500 if (proc_hflags
& MIPS_HFLAG_BX
) {
10501 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10503 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10505 case MIPS_HFLAG_BL
:
10506 /* blikely taken case */
10507 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10509 case MIPS_HFLAG_BC
:
10510 /* Conditional branch */
10512 TCGLabel
*l1
= gen_new_label();
10514 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10515 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
10517 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10520 case MIPS_HFLAG_BR
:
10521 /* unconditional branch to register */
10522 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10523 TCGv t0
= tcg_temp_new();
10524 TCGv_i32 t1
= tcg_temp_new_i32();
10526 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10527 tcg_gen_trunc_tl_i32(t1
, t0
);
10529 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10530 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10531 tcg_gen_or_i32(hflags
, hflags
, t1
);
10532 tcg_temp_free_i32(t1
);
10534 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10536 tcg_gen_mov_tl(cpu_PC
, btarget
);
10538 if (ctx
->singlestep_enabled
) {
10539 save_cpu_state(ctx
, 0);
10540 gen_helper_raise_exception_debug(cpu_env
);
10542 tcg_gen_exit_tb(0);
10545 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10551 /* Compact Branches */
10552 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10553 int rs
, int rt
, int32_t offset
)
10555 int bcond_compute
= 0;
10556 TCGv t0
= tcg_temp_new();
10557 TCGv t1
= tcg_temp_new();
10558 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10560 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10561 #ifdef MIPS_DEBUG_DISAS
10562 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10565 generate_exception_end(ctx
, EXCP_RI
);
10569 /* Load needed operands and calculate btarget */
10571 /* compact branch */
10572 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10573 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10574 gen_load_gpr(t0
, rs
);
10575 gen_load_gpr(t1
, rt
);
10577 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10578 if (rs
<= rt
&& rs
== 0) {
10579 /* OPC_BEQZALC, OPC_BNEZALC */
10580 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10583 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10584 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10585 gen_load_gpr(t0
, rs
);
10586 gen_load_gpr(t1
, rt
);
10588 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10590 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10591 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10592 if (rs
== 0 || rs
== rt
) {
10593 /* OPC_BLEZALC, OPC_BGEZALC */
10594 /* OPC_BGTZALC, OPC_BLTZALC */
10595 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10597 gen_load_gpr(t0
, rs
);
10598 gen_load_gpr(t1
, rt
);
10600 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10604 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10609 /* OPC_BEQZC, OPC_BNEZC */
10610 gen_load_gpr(t0
, rs
);
10612 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10614 /* OPC_JIC, OPC_JIALC */
10615 TCGv tbase
= tcg_temp_new();
10616 TCGv toffset
= tcg_temp_new();
10618 gen_load_gpr(tbase
, rt
);
10619 tcg_gen_movi_tl(toffset
, offset
);
10620 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10621 tcg_temp_free(tbase
);
10622 tcg_temp_free(toffset
);
10626 MIPS_INVAL("Compact branch/jump");
10627 generate_exception_end(ctx
, EXCP_RI
);
10631 if (bcond_compute
== 0) {
10632 /* Uncoditional compact branch */
10635 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10638 ctx
->hflags
|= MIPS_HFLAG_BR
;
10641 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10644 ctx
->hflags
|= MIPS_HFLAG_B
;
10647 MIPS_INVAL("Compact branch/jump");
10648 generate_exception_end(ctx
, EXCP_RI
);
10652 /* Generating branch here as compact branches don't have delay slot */
10653 gen_branch(ctx
, 4);
10655 /* Conditional compact branch */
10656 TCGLabel
*fs
= gen_new_label();
10657 save_cpu_state(ctx
, 0);
10660 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10661 if (rs
== 0 && rt
!= 0) {
10663 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10664 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10666 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10669 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10672 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10673 if (rs
== 0 && rt
!= 0) {
10675 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10676 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10678 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10681 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10684 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10685 if (rs
== 0 && rt
!= 0) {
10687 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10688 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10690 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10693 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10696 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10697 if (rs
== 0 && rt
!= 0) {
10699 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10700 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10702 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10705 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10708 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10709 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10711 /* OPC_BOVC, OPC_BNVC */
10712 TCGv t2
= tcg_temp_new();
10713 TCGv t3
= tcg_temp_new();
10714 TCGv t4
= tcg_temp_new();
10715 TCGv input_overflow
= tcg_temp_new();
10717 gen_load_gpr(t0
, rs
);
10718 gen_load_gpr(t1
, rt
);
10719 tcg_gen_ext32s_tl(t2
, t0
);
10720 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10721 tcg_gen_ext32s_tl(t3
, t1
);
10722 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10723 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10725 tcg_gen_add_tl(t4
, t2
, t3
);
10726 tcg_gen_ext32s_tl(t4
, t4
);
10727 tcg_gen_xor_tl(t2
, t2
, t3
);
10728 tcg_gen_xor_tl(t3
, t4
, t3
);
10729 tcg_gen_andc_tl(t2
, t3
, t2
);
10730 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10731 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10732 if (opc
== OPC_BOVC
) {
10734 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10737 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10739 tcg_temp_free(input_overflow
);
10743 } else if (rs
< rt
&& rs
== 0) {
10744 /* OPC_BEQZALC, OPC_BNEZALC */
10745 if (opc
== OPC_BEQZALC
) {
10747 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10750 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10753 /* OPC_BEQC, OPC_BNEC */
10754 if (opc
== OPC_BEQC
) {
10756 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10759 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10764 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10767 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10770 MIPS_INVAL("Compact conditional branch/jump");
10771 generate_exception_end(ctx
, EXCP_RI
);
10775 /* Generating branch here as compact branches don't have delay slot */
10776 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10779 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10787 /* ISA extensions (ASEs) */
10788 /* MIPS16 extension to MIPS32 */
10790 /* MIPS16 major opcodes */
10792 M16_OPC_ADDIUSP
= 0x00,
10793 M16_OPC_ADDIUPC
= 0x01,
10795 M16_OPC_JAL
= 0x03,
10796 M16_OPC_BEQZ
= 0x04,
10797 M16_OPC_BNEQZ
= 0x05,
10798 M16_OPC_SHIFT
= 0x06,
10800 M16_OPC_RRIA
= 0x08,
10801 M16_OPC_ADDIU8
= 0x09,
10802 M16_OPC_SLTI
= 0x0a,
10803 M16_OPC_SLTIU
= 0x0b,
10806 M16_OPC_CMPI
= 0x0e,
10810 M16_OPC_LWSP
= 0x12,
10812 M16_OPC_LBU
= 0x14,
10813 M16_OPC_LHU
= 0x15,
10814 M16_OPC_LWPC
= 0x16,
10815 M16_OPC_LWU
= 0x17,
10818 M16_OPC_SWSP
= 0x1a,
10820 M16_OPC_RRR
= 0x1c,
10822 M16_OPC_EXTEND
= 0x1e,
10826 /* I8 funct field */
10845 /* RR funct field */
10879 /* I64 funct field */
10887 I64_DADDIUPC
= 0x6,
10891 /* RR ry field for CNVT */
10893 RR_RY_CNVT_ZEB
= 0x0,
10894 RR_RY_CNVT_ZEH
= 0x1,
10895 RR_RY_CNVT_ZEW
= 0x2,
10896 RR_RY_CNVT_SEB
= 0x4,
10897 RR_RY_CNVT_SEH
= 0x5,
10898 RR_RY_CNVT_SEW
= 0x6,
/*
 * Translate a 3-bit MIPS16 register field into the corresponding
 * 5-bit MIPS GPR number.  MIPS16 register encodings 0..7 designate
 * $16, $17, $2, $3, $4, $5, $6 and $7 respectively.
 *
 * @r: MIPS16 register field, must be in the range 0..7.
 */
static int xlat(int r)
{
    /* const: the mapping table is read-only. */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
10908 static void gen_mips16_save (DisasContext
*ctx
,
10909 int xsregs
, int aregs
,
10910 int do_ra
, int do_s0
, int do_s1
,
10913 TCGv t0
= tcg_temp_new();
10914 TCGv t1
= tcg_temp_new();
10915 TCGv t2
= tcg_temp_new();
10945 generate_exception_end(ctx
, EXCP_RI
);
10951 gen_base_offset_addr(ctx
, t0
, 29, 12);
10952 gen_load_gpr(t1
, 7);
10953 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10956 gen_base_offset_addr(ctx
, t0
, 29, 8);
10957 gen_load_gpr(t1
, 6);
10958 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10961 gen_base_offset_addr(ctx
, t0
, 29, 4);
10962 gen_load_gpr(t1
, 5);
10963 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10966 gen_base_offset_addr(ctx
, t0
, 29, 0);
10967 gen_load_gpr(t1
, 4);
10968 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10971 gen_load_gpr(t0
, 29);
10973 #define DECR_AND_STORE(reg) do { \
10974 tcg_gen_movi_tl(t2, -4); \
10975 gen_op_addr_add(ctx, t0, t0, t2); \
10976 gen_load_gpr(t1, reg); \
10977 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
10981 DECR_AND_STORE(31);
10986 DECR_AND_STORE(30);
10989 DECR_AND_STORE(23);
10992 DECR_AND_STORE(22);
10995 DECR_AND_STORE(21);
10998 DECR_AND_STORE(20);
11001 DECR_AND_STORE(19);
11004 DECR_AND_STORE(18);
11008 DECR_AND_STORE(17);
11011 DECR_AND_STORE(16);
11041 generate_exception_end(ctx
, EXCP_RI
);
11057 #undef DECR_AND_STORE
11059 tcg_gen_movi_tl(t2
, -framesize
);
11060 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11066 static void gen_mips16_restore (DisasContext
*ctx
,
11067 int xsregs
, int aregs
,
11068 int do_ra
, int do_s0
, int do_s1
,
11072 TCGv t0
= tcg_temp_new();
11073 TCGv t1
= tcg_temp_new();
11074 TCGv t2
= tcg_temp_new();
11076 tcg_gen_movi_tl(t2
, framesize
);
11077 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11079 #define DECR_AND_LOAD(reg) do { \
11080 tcg_gen_movi_tl(t2, -4); \
11081 gen_op_addr_add(ctx, t0, t0, t2); \
11082 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11083 gen_store_gpr(t1, reg); \
11147 generate_exception_end(ctx
, EXCP_RI
);
11163 #undef DECR_AND_LOAD
11165 tcg_gen_movi_tl(t2
, framesize
);
11166 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11172 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11173 int is_64_bit
, int extended
)
11177 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11178 generate_exception_end(ctx
, EXCP_RI
);
11182 t0
= tcg_temp_new();
11184 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11185 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11187 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11193 #if defined(TARGET_MIPS64)
11194 static void decode_i64_mips16 (DisasContext
*ctx
,
11195 int ry
, int funct
, int16_t offset
,
11200 check_insn(ctx
, ISA_MIPS3
);
11201 check_mips_64(ctx
);
11202 offset
= extended
? offset
: offset
<< 3;
11203 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11206 check_insn(ctx
, ISA_MIPS3
);
11207 check_mips_64(ctx
);
11208 offset
= extended
? offset
: offset
<< 3;
11209 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11212 check_insn(ctx
, ISA_MIPS3
);
11213 check_mips_64(ctx
);
11214 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11215 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11218 check_insn(ctx
, ISA_MIPS3
);
11219 check_mips_64(ctx
);
11220 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11221 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11224 check_insn(ctx
, ISA_MIPS3
);
11225 check_mips_64(ctx
);
11226 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11227 generate_exception_end(ctx
, EXCP_RI
);
11229 offset
= extended
? offset
: offset
<< 3;
11230 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11234 check_insn(ctx
, ISA_MIPS3
);
11235 check_mips_64(ctx
);
11236 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11237 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11240 check_insn(ctx
, ISA_MIPS3
);
11241 check_mips_64(ctx
);
11242 offset
= extended
? offset
: offset
<< 2;
11243 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11246 check_insn(ctx
, ISA_MIPS3
);
11247 check_mips_64(ctx
);
11248 offset
= extended
? offset
: offset
<< 2;
11249 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11255 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11257 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11258 int op
, rx
, ry
, funct
, sa
;
11259 int16_t imm
, offset
;
11261 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11262 op
= (ctx
->opcode
>> 11) & 0x1f;
11263 sa
= (ctx
->opcode
>> 22) & 0x1f;
11264 funct
= (ctx
->opcode
>> 8) & 0x7;
11265 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11266 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11267 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11268 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11269 | (ctx
->opcode
& 0x1f));
11271 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11274 case M16_OPC_ADDIUSP
:
11275 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11277 case M16_OPC_ADDIUPC
:
11278 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11281 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11282 /* No delay slot, so just process as a normal instruction */
11285 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11286 /* No delay slot, so just process as a normal instruction */
11288 case M16_OPC_BNEQZ
:
11289 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11290 /* No delay slot, so just process as a normal instruction */
11292 case M16_OPC_SHIFT
:
11293 switch (ctx
->opcode
& 0x3) {
11295 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11298 #if defined(TARGET_MIPS64)
11299 check_mips_64(ctx
);
11300 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11302 generate_exception_end(ctx
, EXCP_RI
);
11306 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11309 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11313 #if defined(TARGET_MIPS64)
11315 check_insn(ctx
, ISA_MIPS3
);
11316 check_mips_64(ctx
);
11317 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11321 imm
= ctx
->opcode
& 0xf;
11322 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11323 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11324 imm
= (int16_t) (imm
<< 1) >> 1;
11325 if ((ctx
->opcode
>> 4) & 0x1) {
11326 #if defined(TARGET_MIPS64)
11327 check_mips_64(ctx
);
11328 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11330 generate_exception_end(ctx
, EXCP_RI
);
11333 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11336 case M16_OPC_ADDIU8
:
11337 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11340 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11342 case M16_OPC_SLTIU
:
11343 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11348 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11351 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11354 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11357 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11360 check_insn(ctx
, ISA_MIPS32
);
11362 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11363 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11364 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11365 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11366 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11367 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11368 | (ctx
->opcode
& 0xf)) << 3;
11370 if (ctx
->opcode
& (1 << 7)) {
11371 gen_mips16_save(ctx
, xsregs
, aregs
,
11372 do_ra
, do_s0
, do_s1
,
11375 gen_mips16_restore(ctx
, xsregs
, aregs
,
11376 do_ra
, do_s0
, do_s1
,
11382 generate_exception_end(ctx
, EXCP_RI
);
11387 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11390 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11392 #if defined(TARGET_MIPS64)
11394 check_insn(ctx
, ISA_MIPS3
);
11395 check_mips_64(ctx
);
11396 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11400 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11403 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11406 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11409 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11412 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11415 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11418 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11420 #if defined(TARGET_MIPS64)
11422 check_insn(ctx
, ISA_MIPS3
);
11423 check_mips_64(ctx
);
11424 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11428 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11431 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11434 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11437 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11439 #if defined(TARGET_MIPS64)
11441 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11445 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Decide whether an SDBBP instruction should be handled by the UHI
 * (Unified Hosting Interface) semihosting layer rather than raising a
 * debug exception.  Only SDBBP code 1 is a UHI call, and only when
 * semihosting is enabled; user-mode emulation never routes SDBBP to
 * semihosting.
 */
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
11461 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11465 int op
, cnvt_op
, op1
, offset
;
11469 op
= (ctx
->opcode
>> 11) & 0x1f;
11470 sa
= (ctx
->opcode
>> 2) & 0x7;
11471 sa
= sa
== 0 ? 8 : sa
;
11472 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11473 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11474 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11475 op1
= offset
= ctx
->opcode
& 0x1f;
11480 case M16_OPC_ADDIUSP
:
11482 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11484 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11487 case M16_OPC_ADDIUPC
:
11488 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11491 offset
= (ctx
->opcode
& 0x7ff) << 1;
11492 offset
= (int16_t)(offset
<< 4) >> 4;
11493 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11494 /* No delay slot, so just process as a normal instruction */
11497 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11498 offset
= (((ctx
->opcode
& 0x1f) << 21)
11499 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11501 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11502 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11506 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11507 ((int8_t)ctx
->opcode
) << 1, 0);
11508 /* No delay slot, so just process as a normal instruction */
11510 case M16_OPC_BNEQZ
:
11511 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11512 ((int8_t)ctx
->opcode
) << 1, 0);
11513 /* No delay slot, so just process as a normal instruction */
11515 case M16_OPC_SHIFT
:
11516 switch (ctx
->opcode
& 0x3) {
11518 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11521 #if defined(TARGET_MIPS64)
11522 check_insn(ctx
, ISA_MIPS3
);
11523 check_mips_64(ctx
);
11524 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11526 generate_exception_end(ctx
, EXCP_RI
);
11530 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11533 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11537 #if defined(TARGET_MIPS64)
11539 check_insn(ctx
, ISA_MIPS3
);
11540 check_mips_64(ctx
);
11541 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11546 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11548 if ((ctx
->opcode
>> 4) & 1) {
11549 #if defined(TARGET_MIPS64)
11550 check_insn(ctx
, ISA_MIPS3
);
11551 check_mips_64(ctx
);
11552 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11554 generate_exception_end(ctx
, EXCP_RI
);
11557 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11561 case M16_OPC_ADDIU8
:
11563 int16_t imm
= (int8_t) ctx
->opcode
;
11565 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11570 int16_t imm
= (uint8_t) ctx
->opcode
;
11571 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11574 case M16_OPC_SLTIU
:
11576 int16_t imm
= (uint8_t) ctx
->opcode
;
11577 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11584 funct
= (ctx
->opcode
>> 8) & 0x7;
11587 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11588 ((int8_t)ctx
->opcode
) << 1, 0);
11591 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11592 ((int8_t)ctx
->opcode
) << 1, 0);
11595 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11598 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11599 ((int8_t)ctx
->opcode
) << 3);
11602 check_insn(ctx
, ISA_MIPS32
);
11604 int do_ra
= ctx
->opcode
& (1 << 6);
11605 int do_s0
= ctx
->opcode
& (1 << 5);
11606 int do_s1
= ctx
->opcode
& (1 << 4);
11607 int framesize
= ctx
->opcode
& 0xf;
11609 if (framesize
== 0) {
11612 framesize
= framesize
<< 3;
11615 if (ctx
->opcode
& (1 << 7)) {
11616 gen_mips16_save(ctx
, 0, 0,
11617 do_ra
, do_s0
, do_s1
, framesize
);
11619 gen_mips16_restore(ctx
, 0, 0,
11620 do_ra
, do_s0
, do_s1
, framesize
);
11626 int rz
= xlat(ctx
->opcode
& 0x7);
11628 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11629 ((ctx
->opcode
>> 5) & 0x7);
11630 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11634 reg32
= ctx
->opcode
& 0x1f;
11635 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11638 generate_exception_end(ctx
, EXCP_RI
);
11645 int16_t imm
= (uint8_t) ctx
->opcode
;
11647 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11652 int16_t imm
= (uint8_t) ctx
->opcode
;
11653 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11656 #if defined(TARGET_MIPS64)
11658 check_insn(ctx
, ISA_MIPS3
);
11659 check_mips_64(ctx
);
11660 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11664 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11667 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11670 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11673 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11676 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11679 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11682 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11684 #if defined (TARGET_MIPS64)
11686 check_insn(ctx
, ISA_MIPS3
);
11687 check_mips_64(ctx
);
11688 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11692 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11695 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11698 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11701 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11705 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11708 switch (ctx
->opcode
& 0x3) {
11710 mips32_op
= OPC_ADDU
;
11713 mips32_op
= OPC_SUBU
;
11715 #if defined(TARGET_MIPS64)
11717 mips32_op
= OPC_DADDU
;
11718 check_insn(ctx
, ISA_MIPS3
);
11719 check_mips_64(ctx
);
11722 mips32_op
= OPC_DSUBU
;
11723 check_insn(ctx
, ISA_MIPS3
);
11724 check_mips_64(ctx
);
11728 generate_exception_end(ctx
, EXCP_RI
);
11732 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11741 int nd
= (ctx
->opcode
>> 7) & 0x1;
11742 int link
= (ctx
->opcode
>> 6) & 0x1;
11743 int ra
= (ctx
->opcode
>> 5) & 0x1;
11746 check_insn(ctx
, ISA_MIPS32
);
11755 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11760 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11761 gen_helper_do_semihosting(cpu_env
);
11763 /* XXX: not clear which exception should be raised
11764 * when in debug mode...
11766 check_insn(ctx
, ISA_MIPS32
);
11767 generate_exception_end(ctx
, EXCP_DBp
);
11771 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11774 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11777 generate_exception_end(ctx
, EXCP_BREAK
);
11780 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11783 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11786 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11788 #if defined (TARGET_MIPS64)
11790 check_insn(ctx
, ISA_MIPS3
);
11791 check_mips_64(ctx
);
11792 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11796 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11799 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11802 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11805 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11808 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11811 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11814 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11817 check_insn(ctx
, ISA_MIPS32
);
11819 case RR_RY_CNVT_ZEB
:
11820 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11822 case RR_RY_CNVT_ZEH
:
11823 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11825 case RR_RY_CNVT_SEB
:
11826 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11828 case RR_RY_CNVT_SEH
:
11829 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11831 #if defined (TARGET_MIPS64)
11832 case RR_RY_CNVT_ZEW
:
11833 check_insn(ctx
, ISA_MIPS64
);
11834 check_mips_64(ctx
);
11835 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11837 case RR_RY_CNVT_SEW
:
11838 check_insn(ctx
, ISA_MIPS64
);
11839 check_mips_64(ctx
);
11840 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11844 generate_exception_end(ctx
, EXCP_RI
);
11849 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11851 #if defined (TARGET_MIPS64)
11853 check_insn(ctx
, ISA_MIPS3
);
11854 check_mips_64(ctx
);
11855 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11858 check_insn(ctx
, ISA_MIPS3
);
11859 check_mips_64(ctx
);
11860 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11863 check_insn(ctx
, ISA_MIPS3
);
11864 check_mips_64(ctx
);
11865 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11868 check_insn(ctx
, ISA_MIPS3
);
11869 check_mips_64(ctx
);
11870 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11874 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11877 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11880 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11883 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11885 #if defined (TARGET_MIPS64)
11887 check_insn(ctx
, ISA_MIPS3
);
11888 check_mips_64(ctx
);
11889 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11892 check_insn(ctx
, ISA_MIPS3
);
11893 check_mips_64(ctx
);
11894 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11897 check_insn(ctx
, ISA_MIPS3
);
11898 check_mips_64(ctx
);
11899 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11902 check_insn(ctx
, ISA_MIPS3
);
11903 check_mips_64(ctx
);
11904 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11908 generate_exception_end(ctx
, EXCP_RI
);
11912 case M16_OPC_EXTEND
:
11913 decode_extended_mips16_opc(env
, ctx
);
11916 #if defined(TARGET_MIPS64)
11918 funct
= (ctx
->opcode
>> 8) & 0x7;
11919 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
11923 generate_exception_end(ctx
, EXCP_RI
);
11930 /* microMIPS extension to MIPS32/MIPS64 */
11933 * microMIPS32/microMIPS64 major opcodes
11935 * 1. MIPS Architecture for Programmers Volume II-B:
11936 * The microMIPS32 Instruction Set (Revision 3.05)
11938 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
11940 * 2. MIPS Architecture For Programmers Volume II-A:
11941 * The MIPS64 Instruction Set (Revision 3.51)
11971 POOL32S
= 0x16, /* MIPS64 */
11972 DADDIU32
= 0x17, /* MIPS64 */
12001 /* 0x29 is reserved */
12014 /* 0x31 is reserved */
12027 SD32
= 0x36, /* MIPS64 */
12028 LD32
= 0x37, /* MIPS64 */
12030 /* 0x39 is reserved */
12046 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12056 /* POOL32A encoding of minor opcode field */
12059 /* These opcodes are distinguished only by bits 9..6; those bits are
12060 * what are recorded below. */
12097 /* The following can be distinguished by their lower 6 bits. */
12107 /* POOL32AXF encoding of minor opcode field extension */
12110 * 1. MIPS Architecture for Programmers Volume II-B:
12111 * The microMIPS32 Instruction Set (Revision 3.05)
12113 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12115 * 2. MIPS Architecture for Programmers VolumeIV-e:
12116 * The MIPS DSP Application-Specific Extension
12117 * to the microMIPS32 Architecture (Revision 2.34)
12119 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12134 /* begin of microMIPS32 DSP */
12136 /* bits 13..12 for 0x01 */
12142 /* bits 13..12 for 0x2a */
12148 /* bits 13..12 for 0x32 */
12152 /* end of microMIPS32 DSP */
12154 /* bits 15..12 for 0x2c */
12171 /* bits 15..12 for 0x34 */
12179 /* bits 15..12 for 0x3c */
12181 JR
= 0x0, /* alias */
12189 /* bits 15..12 for 0x05 */
12193 /* bits 15..12 for 0x0d */
12205 /* bits 15..12 for 0x15 */
12211 /* bits 15..12 for 0x1d */
12215 /* bits 15..12 for 0x2d */
12220 /* bits 15..12 for 0x35 */
12227 /* POOL32B encoding of minor opcode field (bits 15..12) */
12243 /* POOL32C encoding of minor opcode field (bits 15..12) */
12251 /* 0xa is reserved */
12258 /* 0x6 is reserved */
12264 /* POOL32F encoding of minor opcode field (bits 5..0) */
12267 /* These are the bit 7..6 values */
12276 /* These are the bit 8..6 values */
12301 MOVZ_FMT_05
= 0x05,
12335 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12342 /* POOL32Fxf encoding of minor opcode extension field */
12380 /* POOL32I encoding of minor opcode field (bits 25..21) */
12410 /* These overlap and are distinguished by bit16 of the instruction */
12419 /* POOL16A encoding of minor opcode field */
12426 /* POOL16B encoding of minor opcode field */
12433 /* POOL16C encoding of minor opcode field */
12453 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12473 /* POOL16D encoding of minor opcode field */
12480 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit MIPS16/microMIPS register encoding to the architectural
 * GPR number: encodings 0 and 1 name $16/$17 (s0/s1), 2..7 name $2..$7.
 * (Restored braces/return lost in the damaged extraction of this file.)
 */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
/* Used for 16-bit store instructions.  Like mmreg(), maps a 3-bit register
 * encoding to the architectural GPR number, but encoding 0 names $0 (the
 * zero register) rather than $16, as stores may legitimately name $0. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12502 #define uMIPS_RD(op) ((op >> 7) & 0x7)
12503 #define uMIPS_RS(op) ((op >> 4) & 0x7)
12504 #define uMIPS_RS2(op) uMIPS_RS(op)
12505 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
12506 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12507 #define uMIPS_RS5(op) (op & 0x1f)
12509 /* Signed immediate */
12510 #define SIMM(op, start, width) \
12511 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
12514 /* Zero-extended immediate */
12515 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12517 static void gen_addiur1sp(DisasContext
*ctx
)
12519 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12521 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12524 static void gen_addiur2(DisasContext
*ctx
)
12526 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12527 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12528 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12530 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12533 static void gen_addiusp(DisasContext
*ctx
)
12535 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12538 if (encoded
<= 1) {
12539 decoded
= 256 + encoded
;
12540 } else if (encoded
<= 255) {
12542 } else if (encoded
<= 509) {
12543 decoded
= encoded
- 512;
12545 decoded
= encoded
- 768;
12548 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12551 static void gen_addius5(DisasContext
*ctx
)
12553 int imm
= SIMM(ctx
->opcode
, 1, 4);
12554 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12556 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12559 static void gen_andi16(DisasContext
*ctx
)
12561 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12562 31, 32, 63, 64, 255, 32768, 65535 };
12563 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12564 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12565 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12567 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12570 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12571 int base
, int16_t offset
)
12576 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12577 generate_exception_end(ctx
, EXCP_RI
);
12581 t0
= tcg_temp_new();
12583 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12585 t1
= tcg_const_tl(reglist
);
12586 t2
= tcg_const_i32(ctx
->mem_idx
);
12588 save_cpu_state(ctx
, 1);
12591 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12594 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12596 #ifdef TARGET_MIPS64
12598 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12601 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12607 tcg_temp_free_i32(t2
);
12611 static void gen_pool16c_insn(DisasContext
*ctx
)
12613 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12614 int rs
= mmreg(ctx
->opcode
& 0x7);
12616 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12621 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12627 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12633 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12639 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12646 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12647 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12649 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12658 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12659 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12661 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12668 int reg
= ctx
->opcode
& 0x1f;
12670 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12676 int reg
= ctx
->opcode
& 0x1f;
12677 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12678 /* Let normal delay slot handling in our caller take us
12679 to the branch target. */
12684 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12685 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12689 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12690 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12694 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12698 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12701 generate_exception_end(ctx
, EXCP_BREAK
);
12704 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12705 gen_helper_do_semihosting(cpu_env
);
12707 /* XXX: not clear which exception should be raised
12708 * when in debug mode...
12710 check_insn(ctx
, ISA_MIPS32
);
12711 generate_exception_end(ctx
, EXCP_DBp
);
12714 case JRADDIUSP
+ 0:
12715 case JRADDIUSP
+ 1:
12717 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12718 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12719 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12720 /* Let normal delay slot handling in our caller take us
12721 to the branch target. */
12725 generate_exception_end(ctx
, EXCP_RI
);
12730 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
12733 int rd
, rs
, re
, rt
;
12734 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12735 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12736 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12737 rd
= rd_enc
[enc_dest
];
12738 re
= re_enc
[enc_dest
];
12739 rs
= rs_rt_enc
[enc_rs
];
12740 rt
= rs_rt_enc
[enc_rt
];
12742 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
12744 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
12747 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
12749 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
12753 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12755 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12756 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12758 switch (ctx
->opcode
& 0xf) {
12760 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12763 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12767 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12768 int offset
= extract32(ctx
->opcode
, 4, 4);
12769 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12772 case R6_JRC16
: /* JRCADDIUSP */
12773 if ((ctx
->opcode
>> 4) & 1) {
12775 int imm
= extract32(ctx
->opcode
, 5, 5);
12776 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12777 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12780 int rs
= extract32(ctx
->opcode
, 5, 5);
12781 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12784 case MOVEP
... MOVEP_07
:
12785 case MOVEP_0C
... MOVEP_0F
:
12787 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12788 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12789 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12790 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12794 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12797 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12801 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12802 int offset
= extract32(ctx
->opcode
, 4, 4);
12803 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12806 case JALRC16
: /* BREAK16, SDBBP16 */
12807 switch (ctx
->opcode
& 0x3f) {
12809 case JALRC16
+ 0x20:
12811 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12816 generate_exception(ctx
, EXCP_BREAK
);
12820 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12821 gen_helper_do_semihosting(cpu_env
);
12823 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12824 generate_exception(ctx
, EXCP_RI
);
12826 generate_exception(ctx
, EXCP_DBp
);
12833 generate_exception(ctx
, EXCP_RI
);
12838 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
12840 TCGv t0
= tcg_temp_new();
12841 TCGv t1
= tcg_temp_new();
12843 gen_load_gpr(t0
, base
);
12846 gen_load_gpr(t1
, index
);
12847 tcg_gen_shli_tl(t1
, t1
, 2);
12848 gen_op_addr_add(ctx
, t0
, t1
, t0
);
12851 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12852 gen_store_gpr(t1
, rd
);
12858 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12859 int base
, int16_t offset
)
12863 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12864 generate_exception_end(ctx
, EXCP_RI
);
12868 t0
= tcg_temp_new();
12869 t1
= tcg_temp_new();
12871 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12876 generate_exception_end(ctx
, EXCP_RI
);
12879 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12880 gen_store_gpr(t1
, rd
);
12881 tcg_gen_movi_tl(t1
, 4);
12882 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12883 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12884 gen_store_gpr(t1
, rd
+1);
12887 gen_load_gpr(t1
, rd
);
12888 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12889 tcg_gen_movi_tl(t1
, 4);
12890 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12891 gen_load_gpr(t1
, rd
+1);
12892 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12894 #ifdef TARGET_MIPS64
12897 generate_exception_end(ctx
, EXCP_RI
);
12900 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12901 gen_store_gpr(t1
, rd
);
12902 tcg_gen_movi_tl(t1
, 8);
12903 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12904 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12905 gen_store_gpr(t1
, rd
+1);
12908 gen_load_gpr(t1
, rd
);
12909 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12910 tcg_gen_movi_tl(t1
, 8);
12911 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12912 gen_load_gpr(t1
, rd
+1);
12913 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12921 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
12923 int extension
= (ctx
->opcode
>> 6) & 0x3f;
12924 int minor
= (ctx
->opcode
>> 12) & 0xf;
12925 uint32_t mips32_op
;
12927 switch (extension
) {
12929 mips32_op
= OPC_TEQ
;
12932 mips32_op
= OPC_TGE
;
12935 mips32_op
= OPC_TGEU
;
12938 mips32_op
= OPC_TLT
;
12941 mips32_op
= OPC_TLTU
;
12944 mips32_op
= OPC_TNE
;
12946 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
12948 #ifndef CONFIG_USER_ONLY
12951 check_cp0_enabled(ctx
);
12953 /* Treat as NOP. */
12956 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
12960 check_cp0_enabled(ctx
);
12962 TCGv t0
= tcg_temp_new();
12964 gen_load_gpr(t0
, rt
);
12965 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
12971 switch (minor
& 3) {
12973 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12976 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12979 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12982 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12985 goto pool32axf_invalid
;
12989 switch (minor
& 3) {
12991 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12994 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12997 goto pool32axf_invalid
;
13003 check_insn(ctx
, ISA_MIPS32R6
);
13004 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13007 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13010 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13013 mips32_op
= OPC_CLO
;
13016 mips32_op
= OPC_CLZ
;
13018 check_insn(ctx
, ISA_MIPS32
);
13019 gen_cl(ctx
, mips32_op
, rt
, rs
);
13022 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13023 gen_rdhwr(ctx
, rt
, rs
, 0);
13026 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13029 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13030 mips32_op
= OPC_MULT
;
13033 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13034 mips32_op
= OPC_MULTU
;
13037 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13038 mips32_op
= OPC_DIV
;
13041 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13042 mips32_op
= OPC_DIVU
;
13045 check_insn(ctx
, ISA_MIPS32
);
13046 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13049 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13050 mips32_op
= OPC_MADD
;
13053 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13054 mips32_op
= OPC_MADDU
;
13057 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13058 mips32_op
= OPC_MSUB
;
13061 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13062 mips32_op
= OPC_MSUBU
;
13064 check_insn(ctx
, ISA_MIPS32
);
13065 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13068 goto pool32axf_invalid
;
13079 generate_exception_err(ctx
, EXCP_CpU
, 2);
13082 goto pool32axf_invalid
;
13087 case JALR
: /* JALRC */
13088 case JALR_HB
: /* JALRC_HB */
13089 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13090 /* JALRC, JALRC_HB */
13091 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13093 /* JALR, JALR_HB */
13094 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13095 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13100 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13101 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13102 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13105 goto pool32axf_invalid
;
13111 check_cp0_enabled(ctx
);
13112 check_insn(ctx
, ISA_MIPS32R2
);
13113 gen_load_srsgpr(rs
, rt
);
13116 check_cp0_enabled(ctx
);
13117 check_insn(ctx
, ISA_MIPS32R2
);
13118 gen_store_srsgpr(rs
, rt
);
13121 goto pool32axf_invalid
;
13124 #ifndef CONFIG_USER_ONLY
13128 mips32_op
= OPC_TLBP
;
13131 mips32_op
= OPC_TLBR
;
13134 mips32_op
= OPC_TLBWI
;
13137 mips32_op
= OPC_TLBWR
;
13140 mips32_op
= OPC_TLBINV
;
13143 mips32_op
= OPC_TLBINVF
;
13146 mips32_op
= OPC_WAIT
;
13149 mips32_op
= OPC_DERET
;
13152 mips32_op
= OPC_ERET
;
13154 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13157 goto pool32axf_invalid
;
13163 check_cp0_enabled(ctx
);
13165 TCGv t0
= tcg_temp_new();
13167 save_cpu_state(ctx
, 1);
13168 gen_helper_di(t0
, cpu_env
);
13169 gen_store_gpr(t0
, rs
);
13170 /* Stop translation as we may have switched the execution mode */
13171 ctx
->bstate
= BS_STOP
;
13176 check_cp0_enabled(ctx
);
13178 TCGv t0
= tcg_temp_new();
13180 save_cpu_state(ctx
, 1);
13181 gen_helper_ei(t0
, cpu_env
);
13182 gen_store_gpr(t0
, rs
);
13183 /* Stop translation as we may have switched the execution mode */
13184 ctx
->bstate
= BS_STOP
;
13189 goto pool32axf_invalid
;
13199 generate_exception_end(ctx
, EXCP_SYSCALL
);
13202 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13203 gen_helper_do_semihosting(cpu_env
);
13205 check_insn(ctx
, ISA_MIPS32
);
13206 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13207 generate_exception_end(ctx
, EXCP_RI
);
13209 generate_exception_end(ctx
, EXCP_DBp
);
13214 goto pool32axf_invalid
;
13218 switch (minor
& 3) {
13220 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13223 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13226 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13229 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13232 goto pool32axf_invalid
;
13236 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13239 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13242 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13245 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13248 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13251 goto pool32axf_invalid
;
13256 MIPS_INVAL("pool32axf");
13257 generate_exception_end(ctx
, EXCP_RI
);
13262 /* Values for microMIPS fmt field. Variable-width, depending on which
13263 formats the instruction supports. */
13282 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13284 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13285 uint32_t mips32_op
;
13287 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13288 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13289 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13291 switch (extension
) {
13292 case FLOAT_1BIT_FMT(CFC1
, 0):
13293 mips32_op
= OPC_CFC1
;
13295 case FLOAT_1BIT_FMT(CTC1
, 0):
13296 mips32_op
= OPC_CTC1
;
13298 case FLOAT_1BIT_FMT(MFC1
, 0):
13299 mips32_op
= OPC_MFC1
;
13301 case FLOAT_1BIT_FMT(MTC1
, 0):
13302 mips32_op
= OPC_MTC1
;
13304 case FLOAT_1BIT_FMT(MFHC1
, 0):
13305 mips32_op
= OPC_MFHC1
;
13307 case FLOAT_1BIT_FMT(MTHC1
, 0):
13308 mips32_op
= OPC_MTHC1
;
13310 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13313 /* Reciprocal square root */
13314 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13315 mips32_op
= OPC_RSQRT_S
;
13317 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13318 mips32_op
= OPC_RSQRT_D
;
13322 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13323 mips32_op
= OPC_SQRT_S
;
13325 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13326 mips32_op
= OPC_SQRT_D
;
13330 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13331 mips32_op
= OPC_RECIP_S
;
13333 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13334 mips32_op
= OPC_RECIP_D
;
13338 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13339 mips32_op
= OPC_FLOOR_L_S
;
13341 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13342 mips32_op
= OPC_FLOOR_L_D
;
13344 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13345 mips32_op
= OPC_FLOOR_W_S
;
13347 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13348 mips32_op
= OPC_FLOOR_W_D
;
13352 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13353 mips32_op
= OPC_CEIL_L_S
;
13355 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13356 mips32_op
= OPC_CEIL_L_D
;
13358 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13359 mips32_op
= OPC_CEIL_W_S
;
13361 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13362 mips32_op
= OPC_CEIL_W_D
;
13366 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13367 mips32_op
= OPC_TRUNC_L_S
;
13369 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13370 mips32_op
= OPC_TRUNC_L_D
;
13372 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13373 mips32_op
= OPC_TRUNC_W_S
;
13375 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13376 mips32_op
= OPC_TRUNC_W_D
;
13380 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13381 mips32_op
= OPC_ROUND_L_S
;
13383 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13384 mips32_op
= OPC_ROUND_L_D
;
13386 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13387 mips32_op
= OPC_ROUND_W_S
;
13389 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13390 mips32_op
= OPC_ROUND_W_D
;
13393 /* Integer to floating-point conversion */
13394 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13395 mips32_op
= OPC_CVT_L_S
;
13397 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13398 mips32_op
= OPC_CVT_L_D
;
13400 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13401 mips32_op
= OPC_CVT_W_S
;
13403 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13404 mips32_op
= OPC_CVT_W_D
;
13407 /* Paired-foo conversions */
13408 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13409 mips32_op
= OPC_CVT_S_PL
;
13411 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13412 mips32_op
= OPC_CVT_S_PU
;
13414 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13415 mips32_op
= OPC_CVT_PW_PS
;
13417 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13418 mips32_op
= OPC_CVT_PS_PW
;
13421 /* Floating-point moves */
13422 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13423 mips32_op
= OPC_MOV_S
;
13425 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13426 mips32_op
= OPC_MOV_D
;
13428 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13429 mips32_op
= OPC_MOV_PS
;
13432 /* Absolute value */
13433 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13434 mips32_op
= OPC_ABS_S
;
13436 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13437 mips32_op
= OPC_ABS_D
;
13439 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13440 mips32_op
= OPC_ABS_PS
;
13444 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13445 mips32_op
= OPC_NEG_S
;
13447 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13448 mips32_op
= OPC_NEG_D
;
13450 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13451 mips32_op
= OPC_NEG_PS
;
13454 /* Reciprocal square root step */
13455 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13456 mips32_op
= OPC_RSQRT1_S
;
13458 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13459 mips32_op
= OPC_RSQRT1_D
;
13461 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13462 mips32_op
= OPC_RSQRT1_PS
;
13465 /* Reciprocal step */
13466 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13467 mips32_op
= OPC_RECIP1_S
;
13469 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13470 mips32_op
= OPC_RECIP1_S
;
13472 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13473 mips32_op
= OPC_RECIP1_PS
;
13476 /* Conversions from double */
13477 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13478 mips32_op
= OPC_CVT_D_S
;
13480 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13481 mips32_op
= OPC_CVT_D_W
;
13483 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13484 mips32_op
= OPC_CVT_D_L
;
13487 /* Conversions from single */
13488 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13489 mips32_op
= OPC_CVT_S_D
;
13491 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13492 mips32_op
= OPC_CVT_S_W
;
13494 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13495 mips32_op
= OPC_CVT_S_L
;
13497 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13500 /* Conditional moves on floating-point codes */
13501 case COND_FLOAT_MOV(MOVT
, 0):
13502 case COND_FLOAT_MOV(MOVT
, 1):
13503 case COND_FLOAT_MOV(MOVT
, 2):
13504 case COND_FLOAT_MOV(MOVT
, 3):
13505 case COND_FLOAT_MOV(MOVT
, 4):
13506 case COND_FLOAT_MOV(MOVT
, 5):
13507 case COND_FLOAT_MOV(MOVT
, 6):
13508 case COND_FLOAT_MOV(MOVT
, 7):
13509 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13510 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13512 case COND_FLOAT_MOV(MOVF
, 0):
13513 case COND_FLOAT_MOV(MOVF
, 1):
13514 case COND_FLOAT_MOV(MOVF
, 2):
13515 case COND_FLOAT_MOV(MOVF
, 3):
13516 case COND_FLOAT_MOV(MOVF
, 4):
13517 case COND_FLOAT_MOV(MOVF
, 5):
13518 case COND_FLOAT_MOV(MOVF
, 6):
13519 case COND_FLOAT_MOV(MOVF
, 7):
13520 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13521 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13524 MIPS_INVAL("pool32fxf");
13525 generate_exception_end(ctx
, EXCP_RI
);
13530 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13534 int rt
, rs
, rd
, rr
;
13536 uint32_t op
, minor
, mips32_op
;
13537 uint32_t cond
, fmt
, cc
;
13539 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13540 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13542 rt
= (ctx
->opcode
>> 21) & 0x1f;
13543 rs
= (ctx
->opcode
>> 16) & 0x1f;
13544 rd
= (ctx
->opcode
>> 11) & 0x1f;
13545 rr
= (ctx
->opcode
>> 6) & 0x1f;
13546 imm
= (int16_t) ctx
->opcode
;
13548 op
= (ctx
->opcode
>> 26) & 0x3f;
13551 minor
= ctx
->opcode
& 0x3f;
13554 minor
= (ctx
->opcode
>> 6) & 0xf;
13557 mips32_op
= OPC_SLL
;
13560 mips32_op
= OPC_SRA
;
13563 mips32_op
= OPC_SRL
;
13566 mips32_op
= OPC_ROTR
;
13568 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13571 check_insn(ctx
, ISA_MIPS32R6
);
13572 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13575 check_insn(ctx
, ISA_MIPS32R6
);
13576 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13579 check_insn(ctx
, ISA_MIPS32R6
);
13580 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13583 goto pool32a_invalid
;
13587 minor
= (ctx
->opcode
>> 6) & 0xf;
13591 mips32_op
= OPC_ADD
;
13594 mips32_op
= OPC_ADDU
;
13597 mips32_op
= OPC_SUB
;
13600 mips32_op
= OPC_SUBU
;
13603 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13604 mips32_op
= OPC_MUL
;
13606 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13610 mips32_op
= OPC_SLLV
;
13613 mips32_op
= OPC_SRLV
;
13616 mips32_op
= OPC_SRAV
;
13619 mips32_op
= OPC_ROTRV
;
13621 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13623 /* Logical operations */
13625 mips32_op
= OPC_AND
;
13628 mips32_op
= OPC_OR
;
13631 mips32_op
= OPC_NOR
;
13634 mips32_op
= OPC_XOR
;
13636 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13638 /* Set less than */
13640 mips32_op
= OPC_SLT
;
13643 mips32_op
= OPC_SLTU
;
13645 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13648 goto pool32a_invalid
;
13652 minor
= (ctx
->opcode
>> 6) & 0xf;
13654 /* Conditional moves */
13655 case MOVN
: /* MUL */
13656 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13658 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13661 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13664 case MOVZ
: /* MUH */
13665 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13667 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13670 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13674 check_insn(ctx
, ISA_MIPS32R6
);
13675 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13678 check_insn(ctx
, ISA_MIPS32R6
);
13679 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13681 case LWXS
: /* DIV */
13682 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13684 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13687 gen_ldxs(ctx
, rs
, rt
, rd
);
13691 check_insn(ctx
, ISA_MIPS32R6
);
13692 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13695 check_insn(ctx
, ISA_MIPS32R6
);
13696 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13699 check_insn(ctx
, ISA_MIPS32R6
);
13700 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13703 goto pool32a_invalid
;
13707 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13710 check_insn(ctx
, ISA_MIPS32R6
);
13711 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13712 extract32(ctx
->opcode
, 9, 2));
13715 check_insn(ctx
, ISA_MIPS32R6
);
13716 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13717 extract32(ctx
->opcode
, 9, 2));
13720 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13723 gen_pool32axf(env
, ctx
, rt
, rs
);
13726 generate_exception_end(ctx
, EXCP_BREAK
);
13729 check_insn(ctx
, ISA_MIPS32R6
);
13730 generate_exception_end(ctx
, EXCP_RI
);
13734 MIPS_INVAL("pool32a");
13735 generate_exception_end(ctx
, EXCP_RI
);
13740 minor
= (ctx
->opcode
>> 12) & 0xf;
13743 check_cp0_enabled(ctx
);
13744 /* Treat as no-op. */
13748 /* COP2: Not implemented. */
13749 generate_exception_err(ctx
, EXCP_CpU
, 2);
13751 #ifdef TARGET_MIPS64
13754 check_insn(ctx
, ISA_MIPS3
);
13755 check_mips_64(ctx
);
13760 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13762 #ifdef TARGET_MIPS64
13765 check_insn(ctx
, ISA_MIPS3
);
13766 check_mips_64(ctx
);
13771 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13774 MIPS_INVAL("pool32b");
13775 generate_exception_end(ctx
, EXCP_RI
);
13780 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13781 minor
= ctx
->opcode
& 0x3f;
13782 check_cp1_enabled(ctx
);
13785 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13786 mips32_op
= OPC_ALNV_PS
;
13789 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13790 mips32_op
= OPC_MADD_S
;
13793 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13794 mips32_op
= OPC_MADD_D
;
13797 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13798 mips32_op
= OPC_MADD_PS
;
13801 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13802 mips32_op
= OPC_MSUB_S
;
13805 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13806 mips32_op
= OPC_MSUB_D
;
13809 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13810 mips32_op
= OPC_MSUB_PS
;
13813 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13814 mips32_op
= OPC_NMADD_S
;
13817 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13818 mips32_op
= OPC_NMADD_D
;
13821 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13822 mips32_op
= OPC_NMADD_PS
;
13825 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13826 mips32_op
= OPC_NMSUB_S
;
13829 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13830 mips32_op
= OPC_NMSUB_D
;
13833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13834 mips32_op
= OPC_NMSUB_PS
;
13836 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13838 case CABS_COND_FMT
:
13839 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13840 cond
= (ctx
->opcode
>> 6) & 0xf;
13841 cc
= (ctx
->opcode
>> 13) & 0x7;
13842 fmt
= (ctx
->opcode
>> 10) & 0x3;
13845 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13848 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13851 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13854 goto pool32f_invalid
;
13858 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13859 cond
= (ctx
->opcode
>> 6) & 0xf;
13860 cc
= (ctx
->opcode
>> 13) & 0x7;
13861 fmt
= (ctx
->opcode
>> 10) & 0x3;
13864 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13867 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13870 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13873 goto pool32f_invalid
;
13877 check_insn(ctx
, ISA_MIPS32R6
);
13878 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13881 check_insn(ctx
, ISA_MIPS32R6
);
13882 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13885 gen_pool32fxf(ctx
, rt
, rs
);
13889 switch ((ctx
->opcode
>> 6) & 0x7) {
13891 mips32_op
= OPC_PLL_PS
;
13894 mips32_op
= OPC_PLU_PS
;
13897 mips32_op
= OPC_PUL_PS
;
13900 mips32_op
= OPC_PUU_PS
;
13903 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13904 mips32_op
= OPC_CVT_PS_S
;
13906 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13909 goto pool32f_invalid
;
13913 check_insn(ctx
, ISA_MIPS32R6
);
13914 switch ((ctx
->opcode
>> 9) & 0x3) {
13916 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13919 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13922 goto pool32f_invalid
;
13927 switch ((ctx
->opcode
>> 6) & 0x7) {
13929 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13930 mips32_op
= OPC_LWXC1
;
13933 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13934 mips32_op
= OPC_SWXC1
;
13937 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13938 mips32_op
= OPC_LDXC1
;
13941 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13942 mips32_op
= OPC_SDXC1
;
13945 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13946 mips32_op
= OPC_LUXC1
;
13949 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13950 mips32_op
= OPC_SUXC1
;
13952 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13955 goto pool32f_invalid
;
13959 check_insn(ctx
, ISA_MIPS32R6
);
13960 switch ((ctx
->opcode
>> 9) & 0x3) {
13962 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
13965 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
13968 goto pool32f_invalid
;
13973 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13974 fmt
= (ctx
->opcode
>> 9) & 0x3;
13975 switch ((ctx
->opcode
>> 6) & 0x7) {
13979 mips32_op
= OPC_RSQRT2_S
;
13982 mips32_op
= OPC_RSQRT2_D
;
13985 mips32_op
= OPC_RSQRT2_PS
;
13988 goto pool32f_invalid
;
13994 mips32_op
= OPC_RECIP2_S
;
13997 mips32_op
= OPC_RECIP2_D
;
14000 mips32_op
= OPC_RECIP2_PS
;
14003 goto pool32f_invalid
;
14007 mips32_op
= OPC_ADDR_PS
;
14010 mips32_op
= OPC_MULR_PS
;
14012 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14015 goto pool32f_invalid
;
14019 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
14020 cc
= (ctx
->opcode
>> 13) & 0x7;
14021 fmt
= (ctx
->opcode
>> 9) & 0x3;
14022 switch ((ctx
->opcode
>> 6) & 0x7) {
14023 case MOVF_FMT
: /* RINT_FMT */
14024 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14028 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
14031 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
14034 goto pool32f_invalid
;
14040 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
14043 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
14047 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
14050 goto pool32f_invalid
;
14054 case MOVT_FMT
: /* CLASS_FMT */
14055 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14059 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
14062 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
14065 goto pool32f_invalid
;
14071 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14074 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14078 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14081 goto pool32f_invalid
;
14086 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14089 goto pool32f_invalid
;
14092 #define FINSN_3ARG_SDPS(prfx) \
14093 switch ((ctx->opcode >> 8) & 0x3) { \
14095 mips32_op = OPC_##prfx##_S; \
14098 mips32_op = OPC_##prfx##_D; \
14100 case FMT_SDPS_PS: \
14102 mips32_op = OPC_##prfx##_PS; \
14105 goto pool32f_invalid; \
14108 check_insn(ctx
, ISA_MIPS32R6
);
14109 switch ((ctx
->opcode
>> 9) & 0x3) {
14111 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14114 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14117 goto pool32f_invalid
;
14121 check_insn(ctx
, ISA_MIPS32R6
);
14122 switch ((ctx
->opcode
>> 9) & 0x3) {
14124 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14127 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14130 goto pool32f_invalid
;
14134 /* regular FP ops */
14135 switch ((ctx
->opcode
>> 6) & 0x3) {
14137 FINSN_3ARG_SDPS(ADD
);
14140 FINSN_3ARG_SDPS(SUB
);
14143 FINSN_3ARG_SDPS(MUL
);
14146 fmt
= (ctx
->opcode
>> 8) & 0x3;
14148 mips32_op
= OPC_DIV_D
;
14149 } else if (fmt
== 0) {
14150 mips32_op
= OPC_DIV_S
;
14152 goto pool32f_invalid
;
14156 goto pool32f_invalid
;
14161 switch ((ctx
->opcode
>> 6) & 0x7) {
14162 case MOVN_FMT
: /* SELNEZ_FMT */
14163 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14165 switch ((ctx
->opcode
>> 9) & 0x3) {
14167 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14170 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14173 goto pool32f_invalid
;
14177 FINSN_3ARG_SDPS(MOVN
);
14181 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14182 FINSN_3ARG_SDPS(MOVN
);
14184 case MOVZ_FMT
: /* SELEQZ_FMT */
14185 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14187 switch ((ctx
->opcode
>> 9) & 0x3) {
14189 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14192 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14195 goto pool32f_invalid
;
14199 FINSN_3ARG_SDPS(MOVZ
);
14203 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14204 FINSN_3ARG_SDPS(MOVZ
);
14207 check_insn(ctx
, ISA_MIPS32R6
);
14208 switch ((ctx
->opcode
>> 9) & 0x3) {
14210 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14213 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14216 goto pool32f_invalid
;
14220 check_insn(ctx
, ISA_MIPS32R6
);
14221 switch ((ctx
->opcode
>> 9) & 0x3) {
14223 mips32_op
= OPC_MADDF_S
;
14226 mips32_op
= OPC_MADDF_D
;
14229 goto pool32f_invalid
;
14233 check_insn(ctx
, ISA_MIPS32R6
);
14234 switch ((ctx
->opcode
>> 9) & 0x3) {
14236 mips32_op
= OPC_MSUBF_S
;
14239 mips32_op
= OPC_MSUBF_D
;
14242 goto pool32f_invalid
;
14246 goto pool32f_invalid
;
14250 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14254 MIPS_INVAL("pool32f");
14255 generate_exception_end(ctx
, EXCP_RI
);
14259 generate_exception_err(ctx
, EXCP_CpU
, 1);
14263 minor
= (ctx
->opcode
>> 21) & 0x1f;
14266 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14267 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14270 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14271 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14272 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14275 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14276 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14277 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14280 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14281 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14284 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14285 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14286 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14289 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14290 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14291 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14294 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14295 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14298 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14299 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14303 case TLTI
: /* BC1EQZC */
14304 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14306 check_cp1_enabled(ctx
);
14307 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14310 mips32_op
= OPC_TLTI
;
14314 case TGEI
: /* BC1NEZC */
14315 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14317 check_cp1_enabled(ctx
);
14318 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14321 mips32_op
= OPC_TGEI
;
14326 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14327 mips32_op
= OPC_TLTIU
;
14330 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14331 mips32_op
= OPC_TGEIU
;
14333 case TNEI
: /* SYNCI */
14334 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14336 /* Break the TB to be able to sync copied instructions
14338 ctx
->bstate
= BS_STOP
;
14341 mips32_op
= OPC_TNEI
;
14346 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14347 mips32_op
= OPC_TEQI
;
14349 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14354 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14355 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14356 4, rs
, 0, imm
<< 1, 0);
14357 /* Compact branches don't have a delay slot, so just let
14358 the normal delay slot handling take us to the branch
14362 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14363 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14366 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14367 /* Break the TB to be able to sync copied instructions
14369 ctx
->bstate
= BS_STOP
;
14373 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14374 /* COP2: Not implemented. */
14375 generate_exception_err(ctx
, EXCP_CpU
, 2);
14378 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14379 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14382 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14383 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14386 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14387 mips32_op
= OPC_BC1FANY4
;
14390 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14391 mips32_op
= OPC_BC1TANY4
;
14394 check_insn(ctx
, ASE_MIPS3D
);
14397 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14398 check_cp1_enabled(ctx
);
14399 gen_compute_branch1(ctx
, mips32_op
,
14400 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14402 generate_exception_err(ctx
, EXCP_CpU
, 1);
14407 /* MIPS DSP: not implemented */
14410 MIPS_INVAL("pool32i");
14411 generate_exception_end(ctx
, EXCP_RI
);
14416 minor
= (ctx
->opcode
>> 12) & 0xf;
14417 offset
= sextract32(ctx
->opcode
, 0,
14418 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14421 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14422 mips32_op
= OPC_LWL
;
14425 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14426 mips32_op
= OPC_SWL
;
14429 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14430 mips32_op
= OPC_LWR
;
14433 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14434 mips32_op
= OPC_SWR
;
14436 #if defined(TARGET_MIPS64)
14438 check_insn(ctx
, ISA_MIPS3
);
14439 check_mips_64(ctx
);
14440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14441 mips32_op
= OPC_LDL
;
14444 check_insn(ctx
, ISA_MIPS3
);
14445 check_mips_64(ctx
);
14446 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14447 mips32_op
= OPC_SDL
;
14450 check_insn(ctx
, ISA_MIPS3
);
14451 check_mips_64(ctx
);
14452 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14453 mips32_op
= OPC_LDR
;
14456 check_insn(ctx
, ISA_MIPS3
);
14457 check_mips_64(ctx
);
14458 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14459 mips32_op
= OPC_SDR
;
14462 check_insn(ctx
, ISA_MIPS3
);
14463 check_mips_64(ctx
);
14464 mips32_op
= OPC_LWU
;
14467 check_insn(ctx
, ISA_MIPS3
);
14468 check_mips_64(ctx
);
14469 mips32_op
= OPC_LLD
;
14473 mips32_op
= OPC_LL
;
14476 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14479 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14482 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14484 #if defined(TARGET_MIPS64)
14486 check_insn(ctx
, ISA_MIPS3
);
14487 check_mips_64(ctx
);
14488 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14492 /* Treat as no-op */
14493 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14494 /* hint codes 24-31 are reserved and signal RI */
14495 generate_exception(ctx
, EXCP_RI
);
14499 MIPS_INVAL("pool32c");
14500 generate_exception_end(ctx
, EXCP_RI
);
14504 case ADDI32
: /* AUI, LUI */
14505 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14507 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14510 mips32_op
= OPC_ADDI
;
14515 mips32_op
= OPC_ADDIU
;
14517 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14520 /* Logical operations */
14522 mips32_op
= OPC_ORI
;
14525 mips32_op
= OPC_XORI
;
14528 mips32_op
= OPC_ANDI
;
14530 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14533 /* Set less than immediate */
14535 mips32_op
= OPC_SLTI
;
14538 mips32_op
= OPC_SLTIU
;
14540 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14543 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14544 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14545 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14546 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14548 case JALS32
: /* BOVC, BEQC, BEQZALC */
14549 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14552 mips32_op
= OPC_BOVC
;
14553 } else if (rs
< rt
&& rs
== 0) {
14555 mips32_op
= OPC_BEQZALC
;
14558 mips32_op
= OPC_BEQC
;
14560 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14563 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14564 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14565 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14568 case BEQ32
: /* BC */
14569 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14571 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14572 sextract32(ctx
->opcode
<< 1, 0, 27));
14575 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14578 case BNE32
: /* BALC */
14579 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14581 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14582 sextract32(ctx
->opcode
<< 1, 0, 27));
14585 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14588 case J32
: /* BGTZC, BLTZC, BLTC */
14589 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14590 if (rs
== 0 && rt
!= 0) {
14592 mips32_op
= OPC_BGTZC
;
14593 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14595 mips32_op
= OPC_BLTZC
;
14598 mips32_op
= OPC_BLTC
;
14600 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14603 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14604 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14607 case JAL32
: /* BLEZC, BGEZC, BGEC */
14608 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14609 if (rs
== 0 && rt
!= 0) {
14611 mips32_op
= OPC_BLEZC
;
14612 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14614 mips32_op
= OPC_BGEZC
;
14617 mips32_op
= OPC_BGEC
;
14619 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14622 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14623 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14624 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14627 /* Floating point (COP1) */
14629 mips32_op
= OPC_LWC1
;
14632 mips32_op
= OPC_LDC1
;
14635 mips32_op
= OPC_SWC1
;
14638 mips32_op
= OPC_SDC1
;
14640 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14642 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14643 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14644 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14645 switch ((ctx
->opcode
>> 16) & 0x1f) {
14646 case ADDIUPC_00
... ADDIUPC_07
:
14647 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14650 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14653 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14655 case LWPC_08
... LWPC_0F
:
14656 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14659 generate_exception(ctx
, EXCP_RI
);
14664 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14665 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14667 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14670 case BNVC
: /* BNEC, BNEZALC */
14671 check_insn(ctx
, ISA_MIPS32R6
);
14674 mips32_op
= OPC_BNVC
;
14675 } else if (rs
< rt
&& rs
== 0) {
14677 mips32_op
= OPC_BNEZALC
;
14680 mips32_op
= OPC_BNEC
;
14682 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14684 case R6_BNEZC
: /* JIALC */
14685 check_insn(ctx
, ISA_MIPS32R6
);
14688 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14689 sextract32(ctx
->opcode
<< 1, 0, 22));
14692 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14695 case R6_BEQZC
: /* JIC */
14696 check_insn(ctx
, ISA_MIPS32R6
);
14699 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14700 sextract32(ctx
->opcode
<< 1, 0, 22));
14703 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14706 case BLEZALC
: /* BGEZALC, BGEUC */
14707 check_insn(ctx
, ISA_MIPS32R6
);
14708 if (rs
== 0 && rt
!= 0) {
14710 mips32_op
= OPC_BLEZALC
;
14711 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14713 mips32_op
= OPC_BGEZALC
;
14716 mips32_op
= OPC_BGEUC
;
14718 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14720 case BGTZALC
: /* BLTZALC, BLTUC */
14721 check_insn(ctx
, ISA_MIPS32R6
);
14722 if (rs
== 0 && rt
!= 0) {
14724 mips32_op
= OPC_BGTZALC
;
14725 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14727 mips32_op
= OPC_BLTZALC
;
14730 mips32_op
= OPC_BLTUC
;
14732 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14734 /* Loads and stores */
14736 mips32_op
= OPC_LB
;
14739 mips32_op
= OPC_LBU
;
14742 mips32_op
= OPC_LH
;
14745 mips32_op
= OPC_LHU
;
14748 mips32_op
= OPC_LW
;
14750 #ifdef TARGET_MIPS64
14752 check_insn(ctx
, ISA_MIPS3
);
14753 check_mips_64(ctx
);
14754 mips32_op
= OPC_LD
;
14757 check_insn(ctx
, ISA_MIPS3
);
14758 check_mips_64(ctx
);
14759 mips32_op
= OPC_SD
;
14763 mips32_op
= OPC_SB
;
14766 mips32_op
= OPC_SH
;
14769 mips32_op
= OPC_SW
;
14772 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14775 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14778 generate_exception_end(ctx
, EXCP_RI
);
14783 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
14787 /* make sure instructions are on a halfword boundary */
14788 if (ctx
->pc
& 0x1) {
14789 env
->CP0_BadVAddr
= ctx
->pc
;
14790 generate_exception_end(ctx
, EXCP_AdEL
);
14794 op
= (ctx
->opcode
>> 10) & 0x3f;
14795 /* Enforce properly-sized instructions in a delay slot */
14796 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
14797 switch (op
& 0x7) { /* MSB-3..MSB-5 */
14799 /* POOL32A, POOL32B, POOL32I, POOL32C */
14801 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
14803 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
14805 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
14807 /* LB32, LH32, LWC132, LDC132, LW32 */
14808 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
14809 generate_exception_end(ctx
, EXCP_RI
);
14814 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
14816 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
14818 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
14819 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
14820 generate_exception_end(ctx
, EXCP_RI
);
14830 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14831 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14832 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14835 switch (ctx
->opcode
& 0x1) {
14843 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14844 /* In the Release 6 the register number location in
14845 * the instruction encoding has changed.
14847 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
14849 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
14855 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14856 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14857 int amount
= (ctx
->opcode
>> 1) & 0x7;
14859 amount
= amount
== 0 ? 8 : amount
;
14861 switch (ctx
->opcode
& 0x1) {
14870 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14874 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14875 gen_pool16c_r6_insn(ctx
);
14877 gen_pool16c_insn(ctx
);
14882 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14883 int rb
= 28; /* GP */
14884 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14886 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14890 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14891 if (ctx
->opcode
& 1) {
14892 generate_exception_end(ctx
, EXCP_RI
);
14895 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14896 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14897 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14898 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14903 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14904 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14905 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14906 offset
= (offset
== 0xf ? -1 : offset
);
14908 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14913 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14914 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14915 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14917 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
14922 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14923 int rb
= 29; /* SP */
14924 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14926 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14931 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14932 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14933 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14935 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14940 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14941 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14942 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14944 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
14949 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14950 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14951 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14953 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
14958 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14959 int rb
= 29; /* SP */
14960 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14962 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14967 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14968 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14969 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14971 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14976 int rd
= uMIPS_RD5(ctx
->opcode
);
14977 int rs
= uMIPS_RS5(ctx
->opcode
);
14979 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
14986 switch (ctx
->opcode
& 0x1) {
14996 switch (ctx
->opcode
& 0x1) {
15001 gen_addiur1sp(ctx
);
15005 case B16
: /* BC16 */
15006 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15007 sextract32(ctx
->opcode
, 0, 10) << 1,
15008 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15010 case BNEZ16
: /* BNEZC16 */
15011 case BEQZ16
: /* BEQZC16 */
15012 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15013 mmreg(uMIPS_RD(ctx
->opcode
)),
15014 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15015 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15020 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15021 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15023 imm
= (imm
== 0x7f ? -1 : imm
);
15024 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15030 generate_exception_end(ctx
, EXCP_RI
);
15033 decode_micromips32_opc(env
, ctx
);
15040 /* SmartMIPS extension to MIPS32 */
15042 #if defined(TARGET_MIPS64)
15044 /* MDMX extension to MIPS64 */
15048 /* MIPSDSP functions. */
15049 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
15050 int rd
, int base
, int offset
)
15055 t0
= tcg_temp_new();
15058 gen_load_gpr(t0
, offset
);
15059 } else if (offset
== 0) {
15060 gen_load_gpr(t0
, base
);
15062 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
15067 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
15068 gen_store_gpr(t0
, rd
);
15071 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
15072 gen_store_gpr(t0
, rd
);
15075 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15076 gen_store_gpr(t0
, rd
);
15078 #if defined(TARGET_MIPS64)
15080 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15081 gen_store_gpr(t0
, rd
);
15088 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15089 int ret
, int v1
, int v2
)
15095 /* Treat as NOP. */
15099 v1_t
= tcg_temp_new();
15100 v2_t
= tcg_temp_new();
15102 gen_load_gpr(v1_t
, v1
);
15103 gen_load_gpr(v2_t
, v2
);
15106 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15107 case OPC_MULT_G_2E
:
15111 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15113 case OPC_ADDUH_R_QB
:
15114 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15117 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15119 case OPC_ADDQH_R_PH
:
15120 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15123 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15125 case OPC_ADDQH_R_W
:
15126 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15129 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15131 case OPC_SUBUH_R_QB
:
15132 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15135 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15137 case OPC_SUBQH_R_PH
:
15138 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15141 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15143 case OPC_SUBQH_R_W
:
15144 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15148 case OPC_ABSQ_S_PH_DSP
:
15150 case OPC_ABSQ_S_QB
:
15152 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15154 case OPC_ABSQ_S_PH
:
15156 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15160 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15162 case OPC_PRECEQ_W_PHL
:
15164 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15165 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15167 case OPC_PRECEQ_W_PHR
:
15169 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15170 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15171 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15173 case OPC_PRECEQU_PH_QBL
:
15175 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15177 case OPC_PRECEQU_PH_QBR
:
15179 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15181 case OPC_PRECEQU_PH_QBLA
:
15183 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15185 case OPC_PRECEQU_PH_QBRA
:
15187 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15189 case OPC_PRECEU_PH_QBL
:
15191 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15193 case OPC_PRECEU_PH_QBR
:
15195 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15197 case OPC_PRECEU_PH_QBLA
:
15199 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15201 case OPC_PRECEU_PH_QBRA
:
15203 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15207 case OPC_ADDU_QB_DSP
:
15211 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15213 case OPC_ADDQ_S_PH
:
15215 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15219 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15223 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15225 case OPC_ADDU_S_QB
:
15227 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15231 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15233 case OPC_ADDU_S_PH
:
15235 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15239 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15241 case OPC_SUBQ_S_PH
:
15243 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15247 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15251 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15253 case OPC_SUBU_S_QB
:
15255 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15259 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15261 case OPC_SUBU_S_PH
:
15263 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15267 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15271 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15275 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15277 case OPC_RADDU_W_QB
:
15279 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15283 case OPC_CMPU_EQ_QB_DSP
:
15285 case OPC_PRECR_QB_PH
:
15287 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15289 case OPC_PRECRQ_QB_PH
:
15291 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15293 case OPC_PRECR_SRA_PH_W
:
15296 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15297 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15299 tcg_temp_free_i32(sa_t
);
15302 case OPC_PRECR_SRA_R_PH_W
:
15305 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15306 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15308 tcg_temp_free_i32(sa_t
);
15311 case OPC_PRECRQ_PH_W
:
15313 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15315 case OPC_PRECRQ_RS_PH_W
:
15317 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15319 case OPC_PRECRQU_S_QB_PH
:
15321 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15325 #ifdef TARGET_MIPS64
15326 case OPC_ABSQ_S_QH_DSP
:
15328 case OPC_PRECEQ_L_PWL
:
15330 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15332 case OPC_PRECEQ_L_PWR
:
15334 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15336 case OPC_PRECEQ_PW_QHL
:
15338 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15340 case OPC_PRECEQ_PW_QHR
:
15342 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15344 case OPC_PRECEQ_PW_QHLA
:
15346 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15348 case OPC_PRECEQ_PW_QHRA
:
15350 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15352 case OPC_PRECEQU_QH_OBL
:
15354 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15356 case OPC_PRECEQU_QH_OBR
:
15358 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15360 case OPC_PRECEQU_QH_OBLA
:
15362 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15364 case OPC_PRECEQU_QH_OBRA
:
15366 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15368 case OPC_PRECEU_QH_OBL
:
15370 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15372 case OPC_PRECEU_QH_OBR
:
15374 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15376 case OPC_PRECEU_QH_OBLA
:
15378 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15380 case OPC_PRECEU_QH_OBRA
:
15382 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15384 case OPC_ABSQ_S_OB
:
15386 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15388 case OPC_ABSQ_S_PW
:
15390 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15392 case OPC_ABSQ_S_QH
:
15394 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15398 case OPC_ADDU_OB_DSP
:
15400 case OPC_RADDU_L_OB
:
15402 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15406 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15408 case OPC_SUBQ_S_PW
:
15410 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15414 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15416 case OPC_SUBQ_S_QH
:
15418 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15422 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15424 case OPC_SUBU_S_OB
:
15426 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15430 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15432 case OPC_SUBU_S_QH
:
15434 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15438 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15440 case OPC_SUBUH_R_OB
:
15442 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15446 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15448 case OPC_ADDQ_S_PW
:
15450 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15454 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15456 case OPC_ADDQ_S_QH
:
15458 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15462 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15464 case OPC_ADDU_S_OB
:
15466 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15470 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15472 case OPC_ADDU_S_QH
:
15474 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15478 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15480 case OPC_ADDUH_R_OB
:
15482 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15486 case OPC_CMPU_EQ_OB_DSP
:
15488 case OPC_PRECR_OB_QH
:
15490 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15492 case OPC_PRECR_SRA_QH_PW
:
15495 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15496 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15497 tcg_temp_free_i32(ret_t
);
15500 case OPC_PRECR_SRA_R_QH_PW
:
15503 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15504 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15505 tcg_temp_free_i32(sa_v
);
15508 case OPC_PRECRQ_OB_QH
:
15510 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15512 case OPC_PRECRQ_PW_L
:
15514 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15516 case OPC_PRECRQ_QH_PW
:
15518 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15520 case OPC_PRECRQ_RS_QH_PW
:
15522 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15524 case OPC_PRECRQU_S_OB_QH
:
15526 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15533 tcg_temp_free(v1_t
);
15534 tcg_temp_free(v2_t
);
15537 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15538 int ret
, int v1
, int v2
)
15546 /* Treat as NOP. */
15550 t0
= tcg_temp_new();
15551 v1_t
= tcg_temp_new();
15552 v2_t
= tcg_temp_new();
15554 tcg_gen_movi_tl(t0
, v1
);
15555 gen_load_gpr(v1_t
, v1
);
15556 gen_load_gpr(v2_t
, v2
);
15559 case OPC_SHLL_QB_DSP
:
15561 op2
= MASK_SHLL_QB(ctx
->opcode
);
15565 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15569 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15573 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15577 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15579 case OPC_SHLL_S_PH
:
15581 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15583 case OPC_SHLLV_S_PH
:
15585 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15589 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15591 case OPC_SHLLV_S_W
:
15593 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15597 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15601 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15605 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15609 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15613 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15615 case OPC_SHRA_R_QB
:
15617 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15621 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15623 case OPC_SHRAV_R_QB
:
15625 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15629 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15631 case OPC_SHRA_R_PH
:
15633 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15637 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15639 case OPC_SHRAV_R_PH
:
15641 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15645 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15647 case OPC_SHRAV_R_W
:
15649 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15651 default: /* Invalid */
15652 MIPS_INVAL("MASK SHLL.QB");
15653 generate_exception_end(ctx
, EXCP_RI
);
15658 #ifdef TARGET_MIPS64
15659 case OPC_SHLL_OB_DSP
:
15660 op2
= MASK_SHLL_OB(ctx
->opcode
);
15664 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15668 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15670 case OPC_SHLL_S_PW
:
15672 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15674 case OPC_SHLLV_S_PW
:
15676 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15680 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15684 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15688 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15692 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15694 case OPC_SHLL_S_QH
:
15696 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15698 case OPC_SHLLV_S_QH
:
15700 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15704 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15708 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15710 case OPC_SHRA_R_OB
:
15712 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15714 case OPC_SHRAV_R_OB
:
15716 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15720 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15724 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15726 case OPC_SHRA_R_PW
:
15728 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15730 case OPC_SHRAV_R_PW
:
15732 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15736 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15740 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15742 case OPC_SHRA_R_QH
:
15744 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15746 case OPC_SHRAV_R_QH
:
15748 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15752 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15756 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15760 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15764 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15766 default: /* Invalid */
15767 MIPS_INVAL("MASK SHLL.OB");
15768 generate_exception_end(ctx
, EXCP_RI
);
15776 tcg_temp_free(v1_t
);
15777 tcg_temp_free(v2_t
);
15780 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15781 int ret
, int v1
, int v2
, int check_ret
)
15787 if ((ret
== 0) && (check_ret
== 1)) {
15788 /* Treat as NOP. */
15792 t0
= tcg_temp_new_i32();
15793 v1_t
= tcg_temp_new();
15794 v2_t
= tcg_temp_new();
15796 tcg_gen_movi_i32(t0
, ret
);
15797 gen_load_gpr(v1_t
, v1
);
15798 gen_load_gpr(v2_t
, v2
);
15801 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
15802 * the same mask and op1. */
15803 case OPC_MULT_G_2E
:
15807 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15810 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15813 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15815 case OPC_MULQ_RS_W
:
15816 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15820 case OPC_DPA_W_PH_DSP
:
15822 case OPC_DPAU_H_QBL
:
15824 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15826 case OPC_DPAU_H_QBR
:
15828 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15830 case OPC_DPSU_H_QBL
:
15832 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15834 case OPC_DPSU_H_QBR
:
15836 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15840 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15842 case OPC_DPAX_W_PH
:
15844 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15846 case OPC_DPAQ_S_W_PH
:
15848 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15850 case OPC_DPAQX_S_W_PH
:
15852 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15854 case OPC_DPAQX_SA_W_PH
:
15856 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15860 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15862 case OPC_DPSX_W_PH
:
15864 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15866 case OPC_DPSQ_S_W_PH
:
15868 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15870 case OPC_DPSQX_S_W_PH
:
15872 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15874 case OPC_DPSQX_SA_W_PH
:
15876 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15878 case OPC_MULSAQ_S_W_PH
:
15880 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15882 case OPC_DPAQ_SA_L_W
:
15884 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15886 case OPC_DPSQ_SA_L_W
:
15888 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15890 case OPC_MAQ_S_W_PHL
:
15892 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15894 case OPC_MAQ_S_W_PHR
:
15896 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15898 case OPC_MAQ_SA_W_PHL
:
15900 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15902 case OPC_MAQ_SA_W_PHR
:
15904 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15906 case OPC_MULSA_W_PH
:
15908 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15912 #ifdef TARGET_MIPS64
15913 case OPC_DPAQ_W_QH_DSP
:
15915 int ac
= ret
& 0x03;
15916 tcg_gen_movi_i32(t0
, ac
);
15921 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15925 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15929 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15933 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15937 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15939 case OPC_DPAQ_S_W_QH
:
15941 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15943 case OPC_DPAQ_SA_L_PW
:
15945 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15947 case OPC_DPAU_H_OBL
:
15949 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15951 case OPC_DPAU_H_OBR
:
15953 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15957 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15959 case OPC_DPSQ_S_W_QH
:
15961 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15963 case OPC_DPSQ_SA_L_PW
:
15965 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15967 case OPC_DPSU_H_OBL
:
15969 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15971 case OPC_DPSU_H_OBR
:
15973 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15975 case OPC_MAQ_S_L_PWL
:
15977 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
15979 case OPC_MAQ_S_L_PWR
:
15981 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
15983 case OPC_MAQ_S_W_QHLL
:
15985 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15987 case OPC_MAQ_SA_W_QHLL
:
15989 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15991 case OPC_MAQ_S_W_QHLR
:
15993 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15995 case OPC_MAQ_SA_W_QHLR
:
15997 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15999 case OPC_MAQ_S_W_QHRL
:
16001 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16003 case OPC_MAQ_SA_W_QHRL
:
16005 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16007 case OPC_MAQ_S_W_QHRR
:
16009 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16011 case OPC_MAQ_SA_W_QHRR
:
16013 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16015 case OPC_MULSAQ_S_L_PW
:
16017 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16019 case OPC_MULSAQ_S_W_QH
:
16021 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16027 case OPC_ADDU_QB_DSP
:
16029 case OPC_MULEU_S_PH_QBL
:
16031 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16033 case OPC_MULEU_S_PH_QBR
:
16035 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16037 case OPC_MULQ_RS_PH
:
16039 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16041 case OPC_MULEQ_S_W_PHL
:
16043 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16045 case OPC_MULEQ_S_W_PHR
:
16047 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16049 case OPC_MULQ_S_PH
:
16051 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16055 #ifdef TARGET_MIPS64
16056 case OPC_ADDU_OB_DSP
:
16058 case OPC_MULEQ_S_PW_QHL
:
16060 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16062 case OPC_MULEQ_S_PW_QHR
:
16064 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16066 case OPC_MULEU_S_QH_OBL
:
16068 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16070 case OPC_MULEU_S_QH_OBR
:
16072 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16074 case OPC_MULQ_RS_QH
:
16076 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16083 tcg_temp_free_i32(t0
);
16084 tcg_temp_free(v1_t
);
16085 tcg_temp_free(v2_t
);
16088 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16096 /* Treat as NOP. */
16100 t0
= tcg_temp_new();
16101 val_t
= tcg_temp_new();
16102 gen_load_gpr(val_t
, val
);
16105 case OPC_ABSQ_S_PH_DSP
:
16109 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16114 target_long result
;
16115 imm
= (ctx
->opcode
>> 16) & 0xFF;
16116 result
= (uint32_t)imm
<< 24 |
16117 (uint32_t)imm
<< 16 |
16118 (uint32_t)imm
<< 8 |
16120 result
= (int32_t)result
;
16121 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16126 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16127 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16128 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16129 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16130 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16131 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16136 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16137 imm
= (int16_t)(imm
<< 6) >> 6;
16138 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16139 (target_long
)((int32_t)imm
<< 16 | \
16145 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16146 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16147 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16148 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16152 #ifdef TARGET_MIPS64
16153 case OPC_ABSQ_S_QH_DSP
:
16160 imm
= (ctx
->opcode
>> 16) & 0xFF;
16161 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16162 temp
= (temp
<< 16) | temp
;
16163 temp
= (temp
<< 32) | temp
;
16164 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16172 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16173 imm
= (int16_t)(imm
<< 6) >> 6;
16174 temp
= ((target_long
)imm
<< 32) \
16175 | ((target_long
)imm
& 0xFFFFFFFF);
16176 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16184 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16185 imm
= (int16_t)(imm
<< 6) >> 6;
16187 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16188 ((uint64_t)(uint16_t)imm
<< 32) |
16189 ((uint64_t)(uint16_t)imm
<< 16) |
16190 (uint64_t)(uint16_t)imm
;
16191 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16196 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16197 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16198 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16199 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16200 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16201 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16202 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16206 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16207 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16208 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16212 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16213 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16214 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16215 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16216 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16223 tcg_temp_free(val_t
);
16226 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16227 uint32_t op1
, uint32_t op2
,
16228 int ret
, int v1
, int v2
, int check_ret
)
16234 if ((ret
== 0) && (check_ret
== 1)) {
16235 /* Treat as NOP. */
16239 t1
= tcg_temp_new();
16240 v1_t
= tcg_temp_new();
16241 v2_t
= tcg_temp_new();
16243 gen_load_gpr(v1_t
, v1
);
16244 gen_load_gpr(v2_t
, v2
);
16247 case OPC_CMPU_EQ_QB_DSP
:
16249 case OPC_CMPU_EQ_QB
:
16251 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16253 case OPC_CMPU_LT_QB
:
16255 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16257 case OPC_CMPU_LE_QB
:
16259 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16261 case OPC_CMPGU_EQ_QB
:
16263 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16265 case OPC_CMPGU_LT_QB
:
16267 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16269 case OPC_CMPGU_LE_QB
:
16271 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16273 case OPC_CMPGDU_EQ_QB
:
16275 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16276 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16277 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16278 tcg_gen_shli_tl(t1
, t1
, 24);
16279 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16281 case OPC_CMPGDU_LT_QB
:
16283 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16284 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16285 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16286 tcg_gen_shli_tl(t1
, t1
, 24);
16287 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16289 case OPC_CMPGDU_LE_QB
:
16291 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16292 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16293 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16294 tcg_gen_shli_tl(t1
, t1
, 24);
16295 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16297 case OPC_CMP_EQ_PH
:
16299 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16301 case OPC_CMP_LT_PH
:
16303 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16305 case OPC_CMP_LE_PH
:
16307 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16311 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16315 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16317 case OPC_PACKRL_PH
:
16319 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16323 #ifdef TARGET_MIPS64
16324 case OPC_CMPU_EQ_OB_DSP
:
16326 case OPC_CMP_EQ_PW
:
16328 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16330 case OPC_CMP_LT_PW
:
16332 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16334 case OPC_CMP_LE_PW
:
16336 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16338 case OPC_CMP_EQ_QH
:
16340 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16342 case OPC_CMP_LT_QH
:
16344 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16346 case OPC_CMP_LE_QH
:
16348 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16350 case OPC_CMPGDU_EQ_OB
:
16352 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16354 case OPC_CMPGDU_LT_OB
:
16356 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16358 case OPC_CMPGDU_LE_OB
:
16360 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16362 case OPC_CMPGU_EQ_OB
:
16364 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16366 case OPC_CMPGU_LT_OB
:
16368 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16370 case OPC_CMPGU_LE_OB
:
16372 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16374 case OPC_CMPU_EQ_OB
:
16376 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16378 case OPC_CMPU_LT_OB
:
16380 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16382 case OPC_CMPU_LE_OB
:
16384 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16386 case OPC_PACKRL_PW
:
16388 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16392 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16396 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16400 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16408 tcg_temp_free(v1_t
);
16409 tcg_temp_free(v2_t
);
16412 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16413 uint32_t op1
, int rt
, int rs
, int sa
)
16420 /* Treat as NOP. */
16424 t0
= tcg_temp_new();
16425 gen_load_gpr(t0
, rs
);
16428 case OPC_APPEND_DSP
:
16429 switch (MASK_APPEND(ctx
->opcode
)) {
16432 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16434 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16438 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16439 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16440 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16441 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16443 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16447 if (sa
!= 0 && sa
!= 2) {
16448 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16449 tcg_gen_ext32u_tl(t0
, t0
);
16450 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16451 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16453 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16455 default: /* Invalid */
16456 MIPS_INVAL("MASK APPEND");
16457 generate_exception_end(ctx
, EXCP_RI
);
16461 #ifdef TARGET_MIPS64
16462 case OPC_DAPPEND_DSP
:
16463 switch (MASK_DAPPEND(ctx
->opcode
)) {
16466 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16470 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16471 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16472 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16476 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16477 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16478 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16483 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16484 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16485 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16486 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16489 default: /* Invalid */
16490 MIPS_INVAL("MASK DAPPEND");
16491 generate_exception_end(ctx
, EXCP_RI
);
16500 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16501 int ret
, int v1
, int v2
, int check_ret
)
16510 if ((ret
== 0) && (check_ret
== 1)) {
16511 /* Treat as NOP. */
16515 t0
= tcg_temp_new();
16516 t1
= tcg_temp_new();
16517 v1_t
= tcg_temp_new();
16518 v2_t
= tcg_temp_new();
16520 gen_load_gpr(v1_t
, v1
);
16521 gen_load_gpr(v2_t
, v2
);
16524 case OPC_EXTR_W_DSP
:
16528 tcg_gen_movi_tl(t0
, v2
);
16529 tcg_gen_movi_tl(t1
, v1
);
16530 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16533 tcg_gen_movi_tl(t0
, v2
);
16534 tcg_gen_movi_tl(t1
, v1
);
16535 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16537 case OPC_EXTR_RS_W
:
16538 tcg_gen_movi_tl(t0
, v2
);
16539 tcg_gen_movi_tl(t1
, v1
);
16540 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16543 tcg_gen_movi_tl(t0
, v2
);
16544 tcg_gen_movi_tl(t1
, v1
);
16545 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16547 case OPC_EXTRV_S_H
:
16548 tcg_gen_movi_tl(t0
, v2
);
16549 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16552 tcg_gen_movi_tl(t0
, v2
);
16553 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16555 case OPC_EXTRV_R_W
:
16556 tcg_gen_movi_tl(t0
, v2
);
16557 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16559 case OPC_EXTRV_RS_W
:
16560 tcg_gen_movi_tl(t0
, v2
);
16561 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16564 tcg_gen_movi_tl(t0
, v2
);
16565 tcg_gen_movi_tl(t1
, v1
);
16566 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16569 tcg_gen_movi_tl(t0
, v2
);
16570 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16573 tcg_gen_movi_tl(t0
, v2
);
16574 tcg_gen_movi_tl(t1
, v1
);
16575 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16578 tcg_gen_movi_tl(t0
, v2
);
16579 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16582 imm
= (ctx
->opcode
>> 20) & 0x3F;
16583 tcg_gen_movi_tl(t0
, ret
);
16584 tcg_gen_movi_tl(t1
, imm
);
16585 gen_helper_shilo(t0
, t1
, cpu_env
);
16588 tcg_gen_movi_tl(t0
, ret
);
16589 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16592 tcg_gen_movi_tl(t0
, ret
);
16593 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16596 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16597 tcg_gen_movi_tl(t0
, imm
);
16598 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16601 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16602 tcg_gen_movi_tl(t0
, imm
);
16603 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16607 #ifdef TARGET_MIPS64
16608 case OPC_DEXTR_W_DSP
:
16612 tcg_gen_movi_tl(t0
, ret
);
16613 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16617 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16618 int ac
= (ctx
->opcode
>> 11) & 0x03;
16619 tcg_gen_movi_tl(t0
, shift
);
16620 tcg_gen_movi_tl(t1
, ac
);
16621 gen_helper_dshilo(t0
, t1
, cpu_env
);
16626 int ac
= (ctx
->opcode
>> 11) & 0x03;
16627 tcg_gen_movi_tl(t0
, ac
);
16628 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16632 tcg_gen_movi_tl(t0
, v2
);
16633 tcg_gen_movi_tl(t1
, v1
);
16635 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16638 tcg_gen_movi_tl(t0
, v2
);
16639 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16642 tcg_gen_movi_tl(t0
, v2
);
16643 tcg_gen_movi_tl(t1
, v1
);
16644 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16647 tcg_gen_movi_tl(t0
, v2
);
16648 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16651 tcg_gen_movi_tl(t0
, v2
);
16652 tcg_gen_movi_tl(t1
, v1
);
16653 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16655 case OPC_DEXTR_R_L
:
16656 tcg_gen_movi_tl(t0
, v2
);
16657 tcg_gen_movi_tl(t1
, v1
);
16658 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16660 case OPC_DEXTR_RS_L
:
16661 tcg_gen_movi_tl(t0
, v2
);
16662 tcg_gen_movi_tl(t1
, v1
);
16663 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16666 tcg_gen_movi_tl(t0
, v2
);
16667 tcg_gen_movi_tl(t1
, v1
);
16668 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16670 case OPC_DEXTR_R_W
:
16671 tcg_gen_movi_tl(t0
, v2
);
16672 tcg_gen_movi_tl(t1
, v1
);
16673 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16675 case OPC_DEXTR_RS_W
:
16676 tcg_gen_movi_tl(t0
, v2
);
16677 tcg_gen_movi_tl(t1
, v1
);
16678 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16680 case OPC_DEXTR_S_H
:
16681 tcg_gen_movi_tl(t0
, v2
);
16682 tcg_gen_movi_tl(t1
, v1
);
16683 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16685 case OPC_DEXTRV_S_H
:
16686 tcg_gen_movi_tl(t0
, v2
);
16687 tcg_gen_movi_tl(t1
, v1
);
16688 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16691 tcg_gen_movi_tl(t0
, v2
);
16692 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16694 case OPC_DEXTRV_R_L
:
16695 tcg_gen_movi_tl(t0
, v2
);
16696 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16698 case OPC_DEXTRV_RS_L
:
16699 tcg_gen_movi_tl(t0
, v2
);
16700 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16703 tcg_gen_movi_tl(t0
, v2
);
16704 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16706 case OPC_DEXTRV_R_W
:
16707 tcg_gen_movi_tl(t0
, v2
);
16708 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16710 case OPC_DEXTRV_RS_W
:
16711 tcg_gen_movi_tl(t0
, v2
);
16712 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16721 tcg_temp_free(v1_t
);
16722 tcg_temp_free(v2_t
);
16725 /* End MIPSDSP functions. */
16727 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16729 int rs
, rt
, rd
, sa
;
16732 rs
= (ctx
->opcode
>> 21) & 0x1f;
16733 rt
= (ctx
->opcode
>> 16) & 0x1f;
16734 rd
= (ctx
->opcode
>> 11) & 0x1f;
16735 sa
= (ctx
->opcode
>> 6) & 0x1f;
16737 op1
= MASK_SPECIAL(ctx
->opcode
);
16740 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16742 case OPC_MULT
... OPC_DIVU
:
16743 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16753 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16756 MIPS_INVAL("special_r6 muldiv");
16757 generate_exception_end(ctx
, EXCP_RI
);
16763 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16767 if (rt
== 0 && sa
== 1) {
16768 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16769 We need additionally to check other fields */
16770 gen_cl(ctx
, op1
, rd
, rs
);
16772 generate_exception_end(ctx
, EXCP_RI
);
16776 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16777 gen_helper_do_semihosting(cpu_env
);
16779 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16780 generate_exception_end(ctx
, EXCP_RI
);
16782 generate_exception_end(ctx
, EXCP_DBp
);
16786 #if defined(TARGET_MIPS64)
16788 check_mips_64(ctx
);
16789 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16793 if (rt
== 0 && sa
== 1) {
16794 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16795 We need additionally to check other fields */
16796 check_mips_64(ctx
);
16797 gen_cl(ctx
, op1
, rd
, rs
);
16799 generate_exception_end(ctx
, EXCP_RI
);
16802 case OPC_DMULT
... OPC_DDIVU
:
16803 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16813 check_mips_64(ctx
);
16814 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16817 MIPS_INVAL("special_r6 muldiv");
16818 generate_exception_end(ctx
, EXCP_RI
);
16823 default: /* Invalid */
16824 MIPS_INVAL("special_r6");
16825 generate_exception_end(ctx
, EXCP_RI
);
16830 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
16832 int rs
, rt
, rd
, sa
;
16835 rs
= (ctx
->opcode
>> 21) & 0x1f;
16836 rt
= (ctx
->opcode
>> 16) & 0x1f;
16837 rd
= (ctx
->opcode
>> 11) & 0x1f;
16838 sa
= (ctx
->opcode
>> 6) & 0x1f;
16840 op1
= MASK_SPECIAL(ctx
->opcode
);
16842 case OPC_MOVN
: /* Conditional move */
16844 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
16845 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
16846 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16848 case OPC_MFHI
: /* Move from HI/LO */
16850 gen_HILO(ctx
, op1
, rs
& 3, rd
);
16853 case OPC_MTLO
: /* Move to HI/LO */
16854 gen_HILO(ctx
, op1
, rd
& 3, rs
);
16857 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
16858 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16859 check_cp1_enabled(ctx
);
16860 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
16861 (ctx
->opcode
>> 16) & 1);
16863 generate_exception_err(ctx
, EXCP_CpU
, 1);
16869 check_insn(ctx
, INSN_VR54XX
);
16870 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
16871 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
16873 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
16878 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16880 #if defined(TARGET_MIPS64)
16881 case OPC_DMULT
... OPC_DDIVU
:
16882 check_insn(ctx
, ISA_MIPS3
);
16883 check_mips_64(ctx
);
16884 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16888 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16891 #ifdef MIPS_STRICT_STANDARD
16892 MIPS_INVAL("SPIM");
16893 generate_exception_end(ctx
, EXCP_RI
);
16895 /* Implemented as RI exception for now. */
16896 MIPS_INVAL("spim (unofficial)");
16897 generate_exception_end(ctx
, EXCP_RI
);
16900 default: /* Invalid */
16901 MIPS_INVAL("special_legacy");
16902 generate_exception_end(ctx
, EXCP_RI
);
16907 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
16909 int rs
, rt
, rd
, sa
;
16912 rs
= (ctx
->opcode
>> 21) & 0x1f;
16913 rt
= (ctx
->opcode
>> 16) & 0x1f;
16914 rd
= (ctx
->opcode
>> 11) & 0x1f;
16915 sa
= (ctx
->opcode
>> 6) & 0x1f;
16917 op1
= MASK_SPECIAL(ctx
->opcode
);
16919 case OPC_SLL
: /* Shift with immediate */
16920 if (sa
== 5 && rd
== 0 &&
16921 rs
== 0 && rt
== 0) { /* PAUSE */
16922 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
16923 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16924 generate_exception_end(ctx
, EXCP_RI
);
16930 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16933 switch ((ctx
->opcode
>> 21) & 0x1f) {
16935 /* rotr is decoded as srl on non-R2 CPUs */
16936 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16941 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16944 generate_exception_end(ctx
, EXCP_RI
);
16948 case OPC_ADD
... OPC_SUBU
:
16949 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16951 case OPC_SLLV
: /* Shifts */
16953 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16956 switch ((ctx
->opcode
>> 6) & 0x1f) {
16958 /* rotrv is decoded as srlv on non-R2 CPUs */
16959 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16964 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16967 generate_exception_end(ctx
, EXCP_RI
);
16971 case OPC_SLT
: /* Set on less than */
16973 gen_slt(ctx
, op1
, rd
, rs
, rt
);
16975 case OPC_AND
: /* Logic*/
16979 gen_logic(ctx
, op1
, rd
, rs
, rt
);
16982 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16984 case OPC_TGE
... OPC_TEQ
: /* Traps */
16986 check_insn(ctx
, ISA_MIPS2
);
16987 gen_trap(ctx
, op1
, rs
, rt
, -1);
16989 case OPC_LSA
: /* OPC_PMON */
16990 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
16991 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
16992 decode_opc_special_r6(env
, ctx
);
16994 /* Pmon entry point, also R4010 selsl */
16995 #ifdef MIPS_STRICT_STANDARD
16996 MIPS_INVAL("PMON / selsl");
16997 generate_exception_end(ctx
, EXCP_RI
);
16999 gen_helper_0e0i(pmon
, sa
);
17004 generate_exception_end(ctx
, EXCP_SYSCALL
);
17007 generate_exception_end(ctx
, EXCP_BREAK
);
17010 check_insn(ctx
, ISA_MIPS2
);
17011 /* Treat as NOP. */
17014 #if defined(TARGET_MIPS64)
17015 /* MIPS64 specific opcodes */
17020 check_insn(ctx
, ISA_MIPS3
);
17021 check_mips_64(ctx
);
17022 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17025 switch ((ctx
->opcode
>> 21) & 0x1f) {
17027 /* drotr is decoded as dsrl on non-R2 CPUs */
17028 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17033 check_insn(ctx
, ISA_MIPS3
);
17034 check_mips_64(ctx
);
17035 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17038 generate_exception_end(ctx
, EXCP_RI
);
17043 switch ((ctx
->opcode
>> 21) & 0x1f) {
17045 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17046 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17051 check_insn(ctx
, ISA_MIPS3
);
17052 check_mips_64(ctx
);
17053 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17056 generate_exception_end(ctx
, EXCP_RI
);
17060 case OPC_DADD
... OPC_DSUBU
:
17061 check_insn(ctx
, ISA_MIPS3
);
17062 check_mips_64(ctx
);
17063 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17067 check_insn(ctx
, ISA_MIPS3
);
17068 check_mips_64(ctx
);
17069 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17072 switch ((ctx
->opcode
>> 6) & 0x1f) {
17074 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17075 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17080 check_insn(ctx
, ISA_MIPS3
);
17081 check_mips_64(ctx
);
17082 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17085 generate_exception_end(ctx
, EXCP_RI
);
17090 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17091 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17092 decode_opc_special_r6(env
, ctx
);
17097 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17098 decode_opc_special_r6(env
, ctx
);
17100 decode_opc_special_legacy(env
, ctx
);
17105 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17110 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17112 rs
= (ctx
->opcode
>> 21) & 0x1f;
17113 rt
= (ctx
->opcode
>> 16) & 0x1f;
17114 rd
= (ctx
->opcode
>> 11) & 0x1f;
17116 op1
= MASK_SPECIAL2(ctx
->opcode
);
17118 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17119 case OPC_MSUB
... OPC_MSUBU
:
17120 check_insn(ctx
, ISA_MIPS32
);
17121 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17124 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17127 case OPC_DIVU_G_2F
:
17128 case OPC_MULT_G_2F
:
17129 case OPC_MULTU_G_2F
:
17131 case OPC_MODU_G_2F
:
17132 check_insn(ctx
, INSN_LOONGSON2F
);
17133 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17137 check_insn(ctx
, ISA_MIPS32
);
17138 gen_cl(ctx
, op1
, rd
, rs
);
17141 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17142 gen_helper_do_semihosting(cpu_env
);
17144 /* XXX: not clear which exception should be raised
17145 * when in debug mode...
17147 check_insn(ctx
, ISA_MIPS32
);
17148 generate_exception_end(ctx
, EXCP_DBp
);
17151 #if defined(TARGET_MIPS64)
17154 check_insn(ctx
, ISA_MIPS64
);
17155 check_mips_64(ctx
);
17156 gen_cl(ctx
, op1
, rd
, rs
);
17158 case OPC_DMULT_G_2F
:
17159 case OPC_DMULTU_G_2F
:
17160 case OPC_DDIV_G_2F
:
17161 case OPC_DDIVU_G_2F
:
17162 case OPC_DMOD_G_2F
:
17163 case OPC_DMODU_G_2F
:
17164 check_insn(ctx
, INSN_LOONGSON2F
);
17165 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17168 default: /* Invalid */
17169 MIPS_INVAL("special2_legacy");
17170 generate_exception_end(ctx
, EXCP_RI
);
17175 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17177 int rs
, rt
, rd
, sa
;
17181 rs
= (ctx
->opcode
>> 21) & 0x1f;
17182 rt
= (ctx
->opcode
>> 16) & 0x1f;
17183 rd
= (ctx
->opcode
>> 11) & 0x1f;
17184 sa
= (ctx
->opcode
>> 6) & 0x1f;
17185 imm
= (int16_t)ctx
->opcode
>> 7;
17187 op1
= MASK_SPECIAL3(ctx
->opcode
);
17191 /* hint codes 24-31 are reserved and signal RI */
17192 generate_exception_end(ctx
, EXCP_RI
);
17194 /* Treat as NOP. */
17197 /* Treat as NOP. */
17200 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17203 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17208 /* Treat as NOP. */
17211 op2
= MASK_BSHFL(ctx
->opcode
);
17213 case OPC_ALIGN
... OPC_ALIGN_END
:
17214 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17217 gen_bitswap(ctx
, op2
, rd
, rt
);
17222 #if defined(TARGET_MIPS64)
17224 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17227 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17230 check_mips_64(ctx
);
17233 /* Treat as NOP. */
17236 op2
= MASK_DBSHFL(ctx
->opcode
);
17238 case OPC_DALIGN
... OPC_DALIGN_END
:
17239 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17242 gen_bitswap(ctx
, op2
, rd
, rt
);
17249 default: /* Invalid */
17250 MIPS_INVAL("special3_r6");
17251 generate_exception_end(ctx
, EXCP_RI
);
17256 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17261 rs
= (ctx
->opcode
>> 21) & 0x1f;
17262 rt
= (ctx
->opcode
>> 16) & 0x1f;
17263 rd
= (ctx
->opcode
>> 11) & 0x1f;
17265 op1
= MASK_SPECIAL3(ctx
->opcode
);
17267 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17268 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17269 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17270 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17271 * the same mask and op1. */
17272 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17273 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17276 case OPC_ADDUH_R_QB
:
17278 case OPC_ADDQH_R_PH
:
17280 case OPC_ADDQH_R_W
:
17282 case OPC_SUBUH_R_QB
:
17284 case OPC_SUBQH_R_PH
:
17286 case OPC_SUBQH_R_W
:
17287 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17292 case OPC_MULQ_RS_W
:
17293 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17296 MIPS_INVAL("MASK ADDUH.QB");
17297 generate_exception_end(ctx
, EXCP_RI
);
17300 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17301 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17303 generate_exception_end(ctx
, EXCP_RI
);
17307 op2
= MASK_LX(ctx
->opcode
);
17309 #if defined(TARGET_MIPS64)
17315 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17317 default: /* Invalid */
17318 MIPS_INVAL("MASK LX");
17319 generate_exception_end(ctx
, EXCP_RI
);
17323 case OPC_ABSQ_S_PH_DSP
:
17324 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17326 case OPC_ABSQ_S_QB
:
17327 case OPC_ABSQ_S_PH
:
17329 case OPC_PRECEQ_W_PHL
:
17330 case OPC_PRECEQ_W_PHR
:
17331 case OPC_PRECEQU_PH_QBL
:
17332 case OPC_PRECEQU_PH_QBR
:
17333 case OPC_PRECEQU_PH_QBLA
:
17334 case OPC_PRECEQU_PH_QBRA
:
17335 case OPC_PRECEU_PH_QBL
:
17336 case OPC_PRECEU_PH_QBR
:
17337 case OPC_PRECEU_PH_QBLA
:
17338 case OPC_PRECEU_PH_QBRA
:
17339 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17346 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17349 MIPS_INVAL("MASK ABSQ_S.PH");
17350 generate_exception_end(ctx
, EXCP_RI
);
17354 case OPC_ADDU_QB_DSP
:
17355 op2
= MASK_ADDU_QB(ctx
->opcode
);
17358 case OPC_ADDQ_S_PH
:
17361 case OPC_ADDU_S_QB
:
17363 case OPC_ADDU_S_PH
:
17365 case OPC_SUBQ_S_PH
:
17368 case OPC_SUBU_S_QB
:
17370 case OPC_SUBU_S_PH
:
17374 case OPC_RADDU_W_QB
:
17375 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17377 case OPC_MULEU_S_PH_QBL
:
17378 case OPC_MULEU_S_PH_QBR
:
17379 case OPC_MULQ_RS_PH
:
17380 case OPC_MULEQ_S_W_PHL
:
17381 case OPC_MULEQ_S_W_PHR
:
17382 case OPC_MULQ_S_PH
:
17383 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17385 default: /* Invalid */
17386 MIPS_INVAL("MASK ADDU.QB");
17387 generate_exception_end(ctx
, EXCP_RI
);
17392 case OPC_CMPU_EQ_QB_DSP
:
17393 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17395 case OPC_PRECR_SRA_PH_W
:
17396 case OPC_PRECR_SRA_R_PH_W
:
17397 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17399 case OPC_PRECR_QB_PH
:
17400 case OPC_PRECRQ_QB_PH
:
17401 case OPC_PRECRQ_PH_W
:
17402 case OPC_PRECRQ_RS_PH_W
:
17403 case OPC_PRECRQU_S_QB_PH
:
17404 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17406 case OPC_CMPU_EQ_QB
:
17407 case OPC_CMPU_LT_QB
:
17408 case OPC_CMPU_LE_QB
:
17409 case OPC_CMP_EQ_PH
:
17410 case OPC_CMP_LT_PH
:
17411 case OPC_CMP_LE_PH
:
17412 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17414 case OPC_CMPGU_EQ_QB
:
17415 case OPC_CMPGU_LT_QB
:
17416 case OPC_CMPGU_LE_QB
:
17417 case OPC_CMPGDU_EQ_QB
:
17418 case OPC_CMPGDU_LT_QB
:
17419 case OPC_CMPGDU_LE_QB
:
17422 case OPC_PACKRL_PH
:
17423 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17425 default: /* Invalid */
17426 MIPS_INVAL("MASK CMPU.EQ.QB");
17427 generate_exception_end(ctx
, EXCP_RI
);
17431 case OPC_SHLL_QB_DSP
:
17432 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17434 case OPC_DPA_W_PH_DSP
:
17435 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17437 case OPC_DPAU_H_QBL
:
17438 case OPC_DPAU_H_QBR
:
17439 case OPC_DPSU_H_QBL
:
17440 case OPC_DPSU_H_QBR
:
17442 case OPC_DPAX_W_PH
:
17443 case OPC_DPAQ_S_W_PH
:
17444 case OPC_DPAQX_S_W_PH
:
17445 case OPC_DPAQX_SA_W_PH
:
17447 case OPC_DPSX_W_PH
:
17448 case OPC_DPSQ_S_W_PH
:
17449 case OPC_DPSQX_S_W_PH
:
17450 case OPC_DPSQX_SA_W_PH
:
17451 case OPC_MULSAQ_S_W_PH
:
17452 case OPC_DPAQ_SA_L_W
:
17453 case OPC_DPSQ_SA_L_W
:
17454 case OPC_MAQ_S_W_PHL
:
17455 case OPC_MAQ_S_W_PHR
:
17456 case OPC_MAQ_SA_W_PHL
:
17457 case OPC_MAQ_SA_W_PHR
:
17458 case OPC_MULSA_W_PH
:
17459 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17461 default: /* Invalid */
17462 MIPS_INVAL("MASK DPAW.PH");
17463 generate_exception_end(ctx
, EXCP_RI
);
17468 op2
= MASK_INSV(ctx
->opcode
);
17479 t0
= tcg_temp_new();
17480 t1
= tcg_temp_new();
17482 gen_load_gpr(t0
, rt
);
17483 gen_load_gpr(t1
, rs
);
17485 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17491 default: /* Invalid */
17492 MIPS_INVAL("MASK INSV");
17493 generate_exception_end(ctx
, EXCP_RI
);
17497 case OPC_APPEND_DSP
:
17498 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17500 case OPC_EXTR_W_DSP
:
17501 op2
= MASK_EXTR_W(ctx
->opcode
);
17505 case OPC_EXTR_RS_W
:
17507 case OPC_EXTRV_S_H
:
17509 case OPC_EXTRV_R_W
:
17510 case OPC_EXTRV_RS_W
:
17515 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17518 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17524 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17526 default: /* Invalid */
17527 MIPS_INVAL("MASK EXTR.W");
17528 generate_exception_end(ctx
, EXCP_RI
);
17532 #if defined(TARGET_MIPS64)
17533 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17534 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17535 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17536 check_insn(ctx
, INSN_LOONGSON2E
);
17537 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17539 case OPC_ABSQ_S_QH_DSP
:
17540 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17542 case OPC_PRECEQ_L_PWL
:
17543 case OPC_PRECEQ_L_PWR
:
17544 case OPC_PRECEQ_PW_QHL
:
17545 case OPC_PRECEQ_PW_QHR
:
17546 case OPC_PRECEQ_PW_QHLA
:
17547 case OPC_PRECEQ_PW_QHRA
:
17548 case OPC_PRECEQU_QH_OBL
:
17549 case OPC_PRECEQU_QH_OBR
:
17550 case OPC_PRECEQU_QH_OBLA
:
17551 case OPC_PRECEQU_QH_OBRA
:
17552 case OPC_PRECEU_QH_OBL
:
17553 case OPC_PRECEU_QH_OBR
:
17554 case OPC_PRECEU_QH_OBLA
:
17555 case OPC_PRECEU_QH_OBRA
:
17556 case OPC_ABSQ_S_OB
:
17557 case OPC_ABSQ_S_PW
:
17558 case OPC_ABSQ_S_QH
:
17559 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17567 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17569 default: /* Invalid */
17570 MIPS_INVAL("MASK ABSQ_S.QH");
17571 generate_exception_end(ctx
, EXCP_RI
);
17575 case OPC_ADDU_OB_DSP
:
17576 op2
= MASK_ADDU_OB(ctx
->opcode
);
17578 case OPC_RADDU_L_OB
:
17580 case OPC_SUBQ_S_PW
:
17582 case OPC_SUBQ_S_QH
:
17584 case OPC_SUBU_S_OB
:
17586 case OPC_SUBU_S_QH
:
17588 case OPC_SUBUH_R_OB
:
17590 case OPC_ADDQ_S_PW
:
17592 case OPC_ADDQ_S_QH
:
17594 case OPC_ADDU_S_OB
:
17596 case OPC_ADDU_S_QH
:
17598 case OPC_ADDUH_R_OB
:
17599 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17601 case OPC_MULEQ_S_PW_QHL
:
17602 case OPC_MULEQ_S_PW_QHR
:
17603 case OPC_MULEU_S_QH_OBL
:
17604 case OPC_MULEU_S_QH_OBR
:
17605 case OPC_MULQ_RS_QH
:
17606 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17608 default: /* Invalid */
17609 MIPS_INVAL("MASK ADDU.OB");
17610 generate_exception_end(ctx
, EXCP_RI
);
17614 case OPC_CMPU_EQ_OB_DSP
:
17615 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17617 case OPC_PRECR_SRA_QH_PW
:
17618 case OPC_PRECR_SRA_R_QH_PW
:
17619 /* Return value is rt. */
17620 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17622 case OPC_PRECR_OB_QH
:
17623 case OPC_PRECRQ_OB_QH
:
17624 case OPC_PRECRQ_PW_L
:
17625 case OPC_PRECRQ_QH_PW
:
17626 case OPC_PRECRQ_RS_QH_PW
:
17627 case OPC_PRECRQU_S_OB_QH
:
17628 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17630 case OPC_CMPU_EQ_OB
:
17631 case OPC_CMPU_LT_OB
:
17632 case OPC_CMPU_LE_OB
:
17633 case OPC_CMP_EQ_QH
:
17634 case OPC_CMP_LT_QH
:
17635 case OPC_CMP_LE_QH
:
17636 case OPC_CMP_EQ_PW
:
17637 case OPC_CMP_LT_PW
:
17638 case OPC_CMP_LE_PW
:
17639 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17641 case OPC_CMPGDU_EQ_OB
:
17642 case OPC_CMPGDU_LT_OB
:
17643 case OPC_CMPGDU_LE_OB
:
17644 case OPC_CMPGU_EQ_OB
:
17645 case OPC_CMPGU_LT_OB
:
17646 case OPC_CMPGU_LE_OB
:
17647 case OPC_PACKRL_PW
:
17651 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17653 default: /* Invalid */
17654 MIPS_INVAL("MASK CMPU_EQ.OB");
17655 generate_exception_end(ctx
, EXCP_RI
);
17659 case OPC_DAPPEND_DSP
:
17660 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17662 case OPC_DEXTR_W_DSP
:
17663 op2
= MASK_DEXTR_W(ctx
->opcode
);
17670 case OPC_DEXTR_R_L
:
17671 case OPC_DEXTR_RS_L
:
17673 case OPC_DEXTR_R_W
:
17674 case OPC_DEXTR_RS_W
:
17675 case OPC_DEXTR_S_H
:
17677 case OPC_DEXTRV_R_L
:
17678 case OPC_DEXTRV_RS_L
:
17679 case OPC_DEXTRV_S_H
:
17681 case OPC_DEXTRV_R_W
:
17682 case OPC_DEXTRV_RS_W
:
17683 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17688 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17690 default: /* Invalid */
17691 MIPS_INVAL("MASK EXTR.W");
17692 generate_exception_end(ctx
, EXCP_RI
);
17696 case OPC_DPAQ_W_QH_DSP
:
17697 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17699 case OPC_DPAU_H_OBL
:
17700 case OPC_DPAU_H_OBR
:
17701 case OPC_DPSU_H_OBL
:
17702 case OPC_DPSU_H_OBR
:
17704 case OPC_DPAQ_S_W_QH
:
17706 case OPC_DPSQ_S_W_QH
:
17707 case OPC_MULSAQ_S_W_QH
:
17708 case OPC_DPAQ_SA_L_PW
:
17709 case OPC_DPSQ_SA_L_PW
:
17710 case OPC_MULSAQ_S_L_PW
:
17711 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17713 case OPC_MAQ_S_W_QHLL
:
17714 case OPC_MAQ_S_W_QHLR
:
17715 case OPC_MAQ_S_W_QHRL
:
17716 case OPC_MAQ_S_W_QHRR
:
17717 case OPC_MAQ_SA_W_QHLL
:
17718 case OPC_MAQ_SA_W_QHLR
:
17719 case OPC_MAQ_SA_W_QHRL
:
17720 case OPC_MAQ_SA_W_QHRR
:
17721 case OPC_MAQ_S_L_PWL
:
17722 case OPC_MAQ_S_L_PWR
:
17727 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17729 default: /* Invalid */
17730 MIPS_INVAL("MASK DPAQ.W.QH");
17731 generate_exception_end(ctx
, EXCP_RI
);
17735 case OPC_DINSV_DSP
:
17736 op2
= MASK_INSV(ctx
->opcode
);
17747 t0
= tcg_temp_new();
17748 t1
= tcg_temp_new();
17750 gen_load_gpr(t0
, rt
);
17751 gen_load_gpr(t1
, rs
);
17753 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17759 default: /* Invalid */
17760 MIPS_INVAL("MASK DINSV");
17761 generate_exception_end(ctx
, EXCP_RI
);
17765 case OPC_SHLL_OB_DSP
:
17766 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17769 default: /* Invalid */
17770 MIPS_INVAL("special3_legacy");
17771 generate_exception_end(ctx
, EXCP_RI
);
17776 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17778 int rs
, rt
, rd
, sa
;
17781 rs
= (ctx
->opcode
>> 21) & 0x1f;
17782 rt
= (ctx
->opcode
>> 16) & 0x1f;
17783 rd
= (ctx
->opcode
>> 11) & 0x1f;
17784 sa
= (ctx
->opcode
>> 6) & 0x1f;
17786 op1
= MASK_SPECIAL3(ctx
->opcode
);
17790 check_insn(ctx
, ISA_MIPS32R2
);
17791 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17794 op2
= MASK_BSHFL(ctx
->opcode
);
17796 case OPC_ALIGN
... OPC_ALIGN_END
:
17798 check_insn(ctx
, ISA_MIPS32R6
);
17799 decode_opc_special3_r6(env
, ctx
);
17802 check_insn(ctx
, ISA_MIPS32R2
);
17803 gen_bshfl(ctx
, op2
, rt
, rd
);
17807 #if defined(TARGET_MIPS64)
17808 case OPC_DEXTM
... OPC_DEXT
:
17809 case OPC_DINSM
... OPC_DINS
:
17810 check_insn(ctx
, ISA_MIPS64R2
);
17811 check_mips_64(ctx
);
17812 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17815 op2
= MASK_DBSHFL(ctx
->opcode
);
17817 case OPC_DALIGN
... OPC_DALIGN_END
:
17819 check_insn(ctx
, ISA_MIPS32R6
);
17820 decode_opc_special3_r6(env
, ctx
);
17823 check_insn(ctx
, ISA_MIPS64R2
);
17824 check_mips_64(ctx
);
17825 op2
= MASK_DBSHFL(ctx
->opcode
);
17826 gen_bshfl(ctx
, op2
, rt
, rd
);
17832 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
17835 check_insn(ctx
, ASE_MT
);
17837 TCGv t0
= tcg_temp_new();
17838 TCGv t1
= tcg_temp_new();
17840 gen_load_gpr(t0
, rt
);
17841 gen_load_gpr(t1
, rs
);
17842 gen_helper_fork(t0
, t1
);
17848 check_insn(ctx
, ASE_MT
);
17850 TCGv t0
= tcg_temp_new();
17852 gen_load_gpr(t0
, rs
);
17853 gen_helper_yield(t0
, cpu_env
, t0
);
17854 gen_store_gpr(t0
, rd
);
17859 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17860 decode_opc_special3_r6(env
, ctx
);
17862 decode_opc_special3_legacy(env
, ctx
);
17867 /* MIPS SIMD Architecture (MSA) */
17868 static inline int check_msa_access(DisasContext
*ctx
)
17870 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
17871 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
17872 generate_exception_end(ctx
, EXCP_RI
);
17876 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
17877 if (ctx
->insn_flags
& ASE_MSA
) {
17878 generate_exception_end(ctx
, EXCP_MSADIS
);
17881 generate_exception_end(ctx
, EXCP_RI
);
17888 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
17890 /* generates tcg ops to check if any element is 0 */
17891 /* Note this function only works with MSA_WRLEN = 128 */
17892 uint64_t eval_zero_or_big
= 0;
17893 uint64_t eval_big
= 0;
17894 TCGv_i64 t0
= tcg_temp_new_i64();
17895 TCGv_i64 t1
= tcg_temp_new_i64();
17898 eval_zero_or_big
= 0x0101010101010101ULL
;
17899 eval_big
= 0x8080808080808080ULL
;
17902 eval_zero_or_big
= 0x0001000100010001ULL
;
17903 eval_big
= 0x8000800080008000ULL
;
17906 eval_zero_or_big
= 0x0000000100000001ULL
;
17907 eval_big
= 0x8000000080000000ULL
;
17910 eval_zero_or_big
= 0x0000000000000001ULL
;
17911 eval_big
= 0x8000000000000000ULL
;
17914 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
17915 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
17916 tcg_gen_andi_i64(t0
, t0
, eval_big
);
17917 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
17918 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
17919 tcg_gen_andi_i64(t1
, t1
, eval_big
);
17920 tcg_gen_or_i64(t0
, t0
, t1
);
17921 /* if all bits are zero then all elements are not zero */
17922 /* if some bit is non-zero then some element is zero */
17923 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
17924 tcg_gen_trunc_i64_tl(tresult
, t0
);
17925 tcg_temp_free_i64(t0
);
17926 tcg_temp_free_i64(t1
);
17929 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
17931 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17932 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
17933 int64_t s16
= (int16_t)ctx
->opcode
;
17935 check_msa_access(ctx
);
17937 if (ctx
->insn_flags
& ISA_MIPS32R6
&& ctx
->hflags
& MIPS_HFLAG_BMASK
) {
17938 generate_exception_end(ctx
, EXCP_RI
);
17945 TCGv_i64 t0
= tcg_temp_new_i64();
17946 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
17947 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
17948 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
17949 tcg_gen_trunc_i64_tl(bcond
, t0
);
17950 tcg_temp_free_i64(t0
);
17957 gen_check_zero_element(bcond
, df
, wt
);
17963 gen_check_zero_element(bcond
, df
, wt
);
17964 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
17968 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
17970 ctx
->hflags
|= MIPS_HFLAG_BC
;
17971 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
17974 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
17976 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
17977 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
17978 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17979 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17981 TCGv_i32 twd
= tcg_const_i32(wd
);
17982 TCGv_i32 tws
= tcg_const_i32(ws
);
17983 TCGv_i32 ti8
= tcg_const_i32(i8
);
17985 switch (MASK_MSA_I8(ctx
->opcode
)) {
17987 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
17990 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
17993 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
17996 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
17999 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18002 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18005 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18011 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18012 if (df
== DF_DOUBLE
) {
18013 generate_exception_end(ctx
, EXCP_RI
);
18015 TCGv_i32 tdf
= tcg_const_i32(df
);
18016 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18017 tcg_temp_free_i32(tdf
);
18022 MIPS_INVAL("MSA instruction");
18023 generate_exception_end(ctx
, EXCP_RI
);
18027 tcg_temp_free_i32(twd
);
18028 tcg_temp_free_i32(tws
);
18029 tcg_temp_free_i32(ti8
);
18032 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18034 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18035 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18036 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18037 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18038 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18039 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18041 TCGv_i32 tdf
= tcg_const_i32(df
);
18042 TCGv_i32 twd
= tcg_const_i32(wd
);
18043 TCGv_i32 tws
= tcg_const_i32(ws
);
18044 TCGv_i32 timm
= tcg_temp_new_i32();
18045 tcg_gen_movi_i32(timm
, u5
);
18047 switch (MASK_MSA_I5(ctx
->opcode
)) {
18049 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18052 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18054 case OPC_MAXI_S_df
:
18055 tcg_gen_movi_i32(timm
, s5
);
18056 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18058 case OPC_MAXI_U_df
:
18059 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18061 case OPC_MINI_S_df
:
18062 tcg_gen_movi_i32(timm
, s5
);
18063 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18065 case OPC_MINI_U_df
:
18066 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18069 tcg_gen_movi_i32(timm
, s5
);
18070 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18072 case OPC_CLTI_S_df
:
18073 tcg_gen_movi_i32(timm
, s5
);
18074 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18076 case OPC_CLTI_U_df
:
18077 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18079 case OPC_CLEI_S_df
:
18080 tcg_gen_movi_i32(timm
, s5
);
18081 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18083 case OPC_CLEI_U_df
:
18084 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18088 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18089 tcg_gen_movi_i32(timm
, s10
);
18090 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18094 MIPS_INVAL("MSA instruction");
18095 generate_exception_end(ctx
, EXCP_RI
);
18099 tcg_temp_free_i32(tdf
);
18100 tcg_temp_free_i32(twd
);
18101 tcg_temp_free_i32(tws
);
18102 tcg_temp_free_i32(timm
);
18105 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18107 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18108 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18109 uint32_t df
= 0, m
= 0;
18110 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18111 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18118 if ((dfm
& 0x40) == 0x00) {
18121 } else if ((dfm
& 0x60) == 0x40) {
18124 } else if ((dfm
& 0x70) == 0x60) {
18127 } else if ((dfm
& 0x78) == 0x70) {
18131 generate_exception_end(ctx
, EXCP_RI
);
18135 tdf
= tcg_const_i32(df
);
18136 tm
= tcg_const_i32(m
);
18137 twd
= tcg_const_i32(wd
);
18138 tws
= tcg_const_i32(ws
);
18140 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18142 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18145 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18148 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18151 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18154 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18157 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18159 case OPC_BINSLI_df
:
18160 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18162 case OPC_BINSRI_df
:
18163 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18166 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18169 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18172 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18175 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18178 MIPS_INVAL("MSA instruction");
18179 generate_exception_end(ctx
, EXCP_RI
);
18183 tcg_temp_free_i32(tdf
);
18184 tcg_temp_free_i32(tm
);
18185 tcg_temp_free_i32(twd
);
18186 tcg_temp_free_i32(tws
);
18189 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18191 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18192 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18193 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18194 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18195 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18197 TCGv_i32 tdf
= tcg_const_i32(df
);
18198 TCGv_i32 twd
= tcg_const_i32(wd
);
18199 TCGv_i32 tws
= tcg_const_i32(ws
);
18200 TCGv_i32 twt
= tcg_const_i32(wt
);
18202 switch (MASK_MSA_3R(ctx
->opcode
)) {
18204 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18207 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18210 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18213 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18215 case OPC_SUBS_S_df
:
18216 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18219 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18222 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18225 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18228 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18231 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18233 case OPC_ADDS_A_df
:
18234 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18236 case OPC_SUBS_U_df
:
18237 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18240 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18243 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18246 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18249 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18252 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18255 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18257 case OPC_ADDS_S_df
:
18258 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18260 case OPC_SUBSUS_U_df
:
18261 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18264 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18267 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18270 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18273 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18276 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18279 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18281 case OPC_ADDS_U_df
:
18282 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18284 case OPC_SUBSUU_S_df
:
18285 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18288 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18291 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18294 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18297 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18300 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18302 case OPC_ASUB_S_df
:
18303 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18306 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18309 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18312 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18315 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18318 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18321 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18323 case OPC_ASUB_U_df
:
18324 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18327 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18330 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18333 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18336 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18338 case OPC_AVER_S_df
:
18339 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18342 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18345 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18348 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18351 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18353 case OPC_AVER_U_df
:
18354 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18357 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18360 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18363 case OPC_DOTP_S_df
:
18364 case OPC_DOTP_U_df
:
18365 case OPC_DPADD_S_df
:
18366 case OPC_DPADD_U_df
:
18367 case OPC_DPSUB_S_df
:
18368 case OPC_HADD_S_df
:
18369 case OPC_DPSUB_U_df
:
18370 case OPC_HADD_U_df
:
18371 case OPC_HSUB_S_df
:
18372 case OPC_HSUB_U_df
:
18373 if (df
== DF_BYTE
) {
18374 generate_exception_end(ctx
, EXCP_RI
);
18377 switch (MASK_MSA_3R(ctx
->opcode
)) {
18378 case OPC_DOTP_S_df
:
18379 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18381 case OPC_DOTP_U_df
:
18382 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18384 case OPC_DPADD_S_df
:
18385 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18387 case OPC_DPADD_U_df
:
18388 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18390 case OPC_DPSUB_S_df
:
18391 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18393 case OPC_HADD_S_df
:
18394 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18396 case OPC_DPSUB_U_df
:
18397 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18399 case OPC_HADD_U_df
:
18400 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18402 case OPC_HSUB_S_df
:
18403 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18405 case OPC_HSUB_U_df
:
18406 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18411 MIPS_INVAL("MSA instruction");
18412 generate_exception_end(ctx
, EXCP_RI
);
18415 tcg_temp_free_i32(twd
);
18416 tcg_temp_free_i32(tws
);
18417 tcg_temp_free_i32(twt
);
18418 tcg_temp_free_i32(tdf
);
18421 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18423 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18424 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18425 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18426 TCGv telm
= tcg_temp_new();
18427 TCGv_i32 tsr
= tcg_const_i32(source
);
18428 TCGv_i32 tdt
= tcg_const_i32(dest
);
18430 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18432 gen_load_gpr(telm
, source
);
18433 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18436 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18437 gen_store_gpr(telm
, dest
);
18440 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18443 MIPS_INVAL("MSA instruction");
18444 generate_exception_end(ctx
, EXCP_RI
);
18448 tcg_temp_free(telm
);
18449 tcg_temp_free_i32(tdt
);
18450 tcg_temp_free_i32(tsr
);
18453 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18456 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18457 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18458 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18460 TCGv_i32 tws
= tcg_const_i32(ws
);
18461 TCGv_i32 twd
= tcg_const_i32(wd
);
18462 TCGv_i32 tn
= tcg_const_i32(n
);
18463 TCGv_i32 tdf
= tcg_const_i32(df
);
18465 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18467 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18469 case OPC_SPLATI_df
:
18470 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18473 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18475 case OPC_COPY_S_df
:
18476 case OPC_COPY_U_df
:
18477 case OPC_INSERT_df
:
18478 #if !defined(TARGET_MIPS64)
18479 /* Double format valid only for MIPS64 */
18480 if (df
== DF_DOUBLE
) {
18481 generate_exception_end(ctx
, EXCP_RI
);
18485 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18486 case OPC_COPY_S_df
:
18487 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18489 case OPC_COPY_U_df
:
18490 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18492 case OPC_INSERT_df
:
18493 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18498 MIPS_INVAL("MSA instruction");
18499 generate_exception_end(ctx
, EXCP_RI
);
18501 tcg_temp_free_i32(twd
);
18502 tcg_temp_free_i32(tws
);
18503 tcg_temp_free_i32(tn
);
18504 tcg_temp_free_i32(tdf
);
18507 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18509 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18510 uint32_t df
= 0, n
= 0;
18512 if ((dfn
& 0x30) == 0x00) {
18515 } else if ((dfn
& 0x38) == 0x20) {
18518 } else if ((dfn
& 0x3c) == 0x30) {
18521 } else if ((dfn
& 0x3e) == 0x38) {
18524 } else if (dfn
== 0x3E) {
18525 /* CTCMSA, CFCMSA, MOVE.V */
18526 gen_msa_elm_3e(env
, ctx
);
18529 generate_exception_end(ctx
, EXCP_RI
);
18533 gen_msa_elm_df(env
, ctx
, df
, n
);
18536 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18538 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18539 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18540 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18541 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18542 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18544 TCGv_i32 twd
= tcg_const_i32(wd
);
18545 TCGv_i32 tws
= tcg_const_i32(ws
);
18546 TCGv_i32 twt
= tcg_const_i32(wt
);
18547 TCGv_i32 tdf
= tcg_temp_new_i32();
18549 /* adjust df value for floating-point instruction */
18550 tcg_gen_movi_i32(tdf
, df
+ 2);
18552 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18554 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18557 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18560 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18563 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18566 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18569 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18572 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18575 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18578 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18581 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18584 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18587 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18590 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18593 tcg_gen_movi_i32(tdf
, df
+ 1);
18594 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18597 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18600 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18602 case OPC_MADD_Q_df
:
18603 tcg_gen_movi_i32(tdf
, df
+ 1);
18604 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18607 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18609 case OPC_MSUB_Q_df
:
18610 tcg_gen_movi_i32(tdf
, df
+ 1);
18611 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18614 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18617 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18620 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18623 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18626 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18629 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18632 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18635 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18638 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18641 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18644 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18647 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18650 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18652 case OPC_MULR_Q_df
:
18653 tcg_gen_movi_i32(tdf
, df
+ 1);
18654 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18657 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18659 case OPC_FMIN_A_df
:
18660 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18662 case OPC_MADDR_Q_df
:
18663 tcg_gen_movi_i32(tdf
, df
+ 1);
18664 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18667 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18670 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18672 case OPC_MSUBR_Q_df
:
18673 tcg_gen_movi_i32(tdf
, df
+ 1);
18674 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18677 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18679 case OPC_FMAX_A_df
:
18680 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18683 MIPS_INVAL("MSA instruction");
18684 generate_exception_end(ctx
, EXCP_RI
);
18688 tcg_temp_free_i32(twd
);
18689 tcg_temp_free_i32(tws
);
18690 tcg_temp_free_i32(twt
);
18691 tcg_temp_free_i32(tdf
);
18694 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18696 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18697 (op & (0x7 << 18)))
18698 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18699 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18700 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18701 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18702 TCGv_i32 twd
= tcg_const_i32(wd
);
18703 TCGv_i32 tws
= tcg_const_i32(ws
);
18704 TCGv_i32 twt
= tcg_const_i32(wt
);
18705 TCGv_i32 tdf
= tcg_const_i32(df
);
18707 switch (MASK_MSA_2R(ctx
->opcode
)) {
18709 #if !defined(TARGET_MIPS64)
18710 /* Double format valid only for MIPS64 */
18711 if (df
== DF_DOUBLE
) {
18712 generate_exception_end(ctx
, EXCP_RI
);
18716 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18719 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18722 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18725 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18728 MIPS_INVAL("MSA instruction");
18729 generate_exception_end(ctx
, EXCP_RI
);
18733 tcg_temp_free_i32(twd
);
18734 tcg_temp_free_i32(tws
);
18735 tcg_temp_free_i32(twt
);
18736 tcg_temp_free_i32(tdf
);
18739 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18741 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18742 (op & (0xf << 17)))
18743 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18744 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18745 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18746 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18747 TCGv_i32 twd
= tcg_const_i32(wd
);
18748 TCGv_i32 tws
= tcg_const_i32(ws
);
18749 TCGv_i32 twt
= tcg_const_i32(wt
);
18750 /* adjust df value for floating-point instruction */
18751 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18753 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18754 case OPC_FCLASS_df
:
18755 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18757 case OPC_FTRUNC_S_df
:
18758 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18760 case OPC_FTRUNC_U_df
:
18761 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18764 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18766 case OPC_FRSQRT_df
:
18767 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18770 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18773 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18776 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18778 case OPC_FEXUPL_df
:
18779 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18781 case OPC_FEXUPR_df
:
18782 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18785 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18788 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18790 case OPC_FTINT_S_df
:
18791 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18793 case OPC_FTINT_U_df
:
18794 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18796 case OPC_FFINT_S_df
:
18797 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18799 case OPC_FFINT_U_df
:
18800 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18804 tcg_temp_free_i32(twd
);
18805 tcg_temp_free_i32(tws
);
18806 tcg_temp_free_i32(twt
);
18807 tcg_temp_free_i32(tdf
);
18810 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
18812 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
18813 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18814 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18815 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18816 TCGv_i32 twd
= tcg_const_i32(wd
);
18817 TCGv_i32 tws
= tcg_const_i32(ws
);
18818 TCGv_i32 twt
= tcg_const_i32(wt
);
18820 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18822 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
18825 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
18828 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
18831 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
18834 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
18837 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
18840 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
18843 MIPS_INVAL("MSA instruction");
18844 generate_exception_end(ctx
, EXCP_RI
);
18848 tcg_temp_free_i32(twd
);
18849 tcg_temp_free_i32(tws
);
18850 tcg_temp_free_i32(twt
);
18853 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18855 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18863 gen_msa_vec_v(env
, ctx
);
18866 gen_msa_2r(env
, ctx
);
18869 gen_msa_2rf(env
, ctx
);
18872 MIPS_INVAL("MSA instruction");
18873 generate_exception_end(ctx
, EXCP_RI
);
18878 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
18880 uint32_t opcode
= ctx
->opcode
;
18881 check_insn(ctx
, ASE_MSA
);
18882 check_msa_access(ctx
);
18884 switch (MASK_MSA_MINOR(opcode
)) {
18885 case OPC_MSA_I8_00
:
18886 case OPC_MSA_I8_01
:
18887 case OPC_MSA_I8_02
:
18888 gen_msa_i8(env
, ctx
);
18890 case OPC_MSA_I5_06
:
18891 case OPC_MSA_I5_07
:
18892 gen_msa_i5(env
, ctx
);
18894 case OPC_MSA_BIT_09
:
18895 case OPC_MSA_BIT_0A
:
18896 gen_msa_bit(env
, ctx
);
18898 case OPC_MSA_3R_0D
:
18899 case OPC_MSA_3R_0E
:
18900 case OPC_MSA_3R_0F
:
18901 case OPC_MSA_3R_10
:
18902 case OPC_MSA_3R_11
:
18903 case OPC_MSA_3R_12
:
18904 case OPC_MSA_3R_13
:
18905 case OPC_MSA_3R_14
:
18906 case OPC_MSA_3R_15
:
18907 gen_msa_3r(env
, ctx
);
18910 gen_msa_elm(env
, ctx
);
18912 case OPC_MSA_3RF_1A
:
18913 case OPC_MSA_3RF_1B
:
18914 case OPC_MSA_3RF_1C
:
18915 gen_msa_3rf(env
, ctx
);
18918 gen_msa_vec(env
, ctx
);
18929 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
18930 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
18931 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18932 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
18934 TCGv_i32 twd
= tcg_const_i32(wd
);
18935 TCGv taddr
= tcg_temp_new();
18936 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
18938 switch (MASK_MSA_MINOR(opcode
)) {
18940 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
18943 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
18946 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
18949 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
18952 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
18955 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
18958 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
18961 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
18965 tcg_temp_free_i32(twd
);
18966 tcg_temp_free(taddr
);
18970 MIPS_INVAL("MSA instruction");
18971 generate_exception_end(ctx
, EXCP_RI
);
18977 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
18980 int rs
, rt
, rd
, sa
;
18984 /* make sure instructions are on a word boundary */
18985 if (ctx
->pc
& 0x3) {
18986 env
->CP0_BadVAddr
= ctx
->pc
;
18987 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
18991 /* Handle blikely not taken case */
18992 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
18993 TCGLabel
*l1
= gen_new_label();
18995 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
18996 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
18997 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
19001 op
= MASK_OP_MAJOR(ctx
->opcode
);
19002 rs
= (ctx
->opcode
>> 21) & 0x1f;
19003 rt
= (ctx
->opcode
>> 16) & 0x1f;
19004 rd
= (ctx
->opcode
>> 11) & 0x1f;
19005 sa
= (ctx
->opcode
>> 6) & 0x1f;
19006 imm
= (int16_t)ctx
->opcode
;
19009 decode_opc_special(env
, ctx
);
19012 decode_opc_special2_legacy(env
, ctx
);
19015 decode_opc_special3(env
, ctx
);
19018 op1
= MASK_REGIMM(ctx
->opcode
);
19020 case OPC_BLTZL
: /* REGIMM branches */
19024 check_insn(ctx
, ISA_MIPS2
);
19025 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19029 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19033 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19035 /* OPC_NAL, OPC_BAL */
19036 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
19038 generate_exception_end(ctx
, EXCP_RI
);
19041 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
19044 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
19046 check_insn(ctx
, ISA_MIPS2
);
19047 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19048 gen_trap(ctx
, op1
, rs
, -1, imm
);
19051 check_insn(ctx
, ISA_MIPS32R6
);
19052 generate_exception_end(ctx
, EXCP_RI
);
19055 check_insn(ctx
, ISA_MIPS32R2
);
19056 /* Break the TB to be able to sync copied instructions
19058 ctx
->bstate
= BS_STOP
;
19060 case OPC_BPOSGE32
: /* MIPS DSP branch */
19061 #if defined(TARGET_MIPS64)
19065 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
19067 #if defined(TARGET_MIPS64)
19069 check_insn(ctx
, ISA_MIPS32R6
);
19070 check_mips_64(ctx
);
19072 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
19076 check_insn(ctx
, ISA_MIPS32R6
);
19077 check_mips_64(ctx
);
19079 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19083 default: /* Invalid */
19084 MIPS_INVAL("regimm");
19085 generate_exception_end(ctx
, EXCP_RI
);
19090 check_cp0_enabled(ctx
);
19091 op1
= MASK_CP0(ctx
->opcode
);
19099 #if defined(TARGET_MIPS64)
19103 #ifndef CONFIG_USER_ONLY
19104 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19105 #endif /* !CONFIG_USER_ONLY */
19107 case OPC_C0_FIRST
... OPC_C0_LAST
:
19108 #ifndef CONFIG_USER_ONLY
19109 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19110 #endif /* !CONFIG_USER_ONLY */
19113 #ifndef CONFIG_USER_ONLY
19116 TCGv t0
= tcg_temp_new();
19118 op2
= MASK_MFMC0(ctx
->opcode
);
19121 check_insn(ctx
, ASE_MT
);
19122 gen_helper_dmt(t0
);
19123 gen_store_gpr(t0
, rt
);
19126 check_insn(ctx
, ASE_MT
);
19127 gen_helper_emt(t0
);
19128 gen_store_gpr(t0
, rt
);
19131 check_insn(ctx
, ASE_MT
);
19132 gen_helper_dvpe(t0
, cpu_env
);
19133 gen_store_gpr(t0
, rt
);
19136 check_insn(ctx
, ASE_MT
);
19137 gen_helper_evpe(t0
, cpu_env
);
19138 gen_store_gpr(t0
, rt
);
19141 check_insn(ctx
, ISA_MIPS32R6
);
19143 gen_helper_dvp(t0
, cpu_env
);
19144 gen_store_gpr(t0
, rt
);
19148 check_insn(ctx
, ISA_MIPS32R6
);
19150 gen_helper_evp(t0
, cpu_env
);
19151 gen_store_gpr(t0
, rt
);
19155 check_insn(ctx
, ISA_MIPS32R2
);
19156 save_cpu_state(ctx
, 1);
19157 gen_helper_di(t0
, cpu_env
);
19158 gen_store_gpr(t0
, rt
);
19159 /* Stop translation as we may have switched
19160 the execution mode. */
19161 ctx
->bstate
= BS_STOP
;
19164 check_insn(ctx
, ISA_MIPS32R2
);
19165 save_cpu_state(ctx
, 1);
19166 gen_helper_ei(t0
, cpu_env
);
19167 gen_store_gpr(t0
, rt
);
19168 /* Stop translation as we may have switched
19169 the execution mode. */
19170 ctx
->bstate
= BS_STOP
;
19172 default: /* Invalid */
19173 MIPS_INVAL("mfmc0");
19174 generate_exception_end(ctx
, EXCP_RI
);
19179 #endif /* !CONFIG_USER_ONLY */
19182 check_insn(ctx
, ISA_MIPS32R2
);
19183 gen_load_srsgpr(rt
, rd
);
19186 check_insn(ctx
, ISA_MIPS32R2
);
19187 gen_store_srsgpr(rt
, rd
);
19191 generate_exception_end(ctx
, EXCP_RI
);
19195 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19196 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19197 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19198 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19201 /* Arithmetic with immediate opcode */
19202 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19206 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19208 case OPC_SLTI
: /* Set on less than with immediate opcode */
19210 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19212 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19213 case OPC_LUI
: /* OPC_AUI */
19216 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19218 case OPC_J
... OPC_JAL
: /* Jump */
19219 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19220 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19223 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19224 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19226 generate_exception_end(ctx
, EXCP_RI
);
19229 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19230 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19233 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19236 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19237 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19239 generate_exception_end(ctx
, EXCP_RI
);
19242 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19243 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19246 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19249 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19252 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19254 check_insn(ctx
, ISA_MIPS32R6
);
19255 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19256 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19259 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19262 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19264 check_insn(ctx
, ISA_MIPS32R6
);
19265 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19266 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19271 check_insn(ctx
, ISA_MIPS2
);
19272 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19276 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19278 case OPC_LL
: /* Load and stores */
19279 check_insn(ctx
, ISA_MIPS2
);
19283 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19285 case OPC_LB
... OPC_LH
:
19286 case OPC_LW
... OPC_LHU
:
19287 gen_ld(ctx
, op
, rt
, rs
, imm
);
19291 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19293 case OPC_SB
... OPC_SH
:
19295 gen_st(ctx
, op
, rt
, rs
, imm
);
19298 check_insn(ctx
, ISA_MIPS2
);
19299 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19300 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19303 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19304 check_cp0_enabled(ctx
);
19305 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19306 /* Treat as NOP. */
19309 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19310 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19311 /* Treat as NOP. */
19314 /* Floating point (COP1). */
19319 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19323 op1
= MASK_CP1(ctx
->opcode
);
19328 check_cp1_enabled(ctx
);
19329 check_insn(ctx
, ISA_MIPS32R2
);
19334 check_cp1_enabled(ctx
);
19335 gen_cp1(ctx
, op1
, rt
, rd
);
19337 #if defined(TARGET_MIPS64)
19340 check_cp1_enabled(ctx
);
19341 check_insn(ctx
, ISA_MIPS3
);
19342 check_mips_64(ctx
);
19343 gen_cp1(ctx
, op1
, rt
, rd
);
19346 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19347 check_cp1_enabled(ctx
);
19348 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19350 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19355 check_insn(ctx
, ASE_MIPS3D
);
19356 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19357 (rt
>> 2) & 0x7, imm
<< 2);
19361 check_cp1_enabled(ctx
);
19362 check_insn(ctx
, ISA_MIPS32R6
);
19363 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19367 check_cp1_enabled(ctx
);
19368 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19370 check_insn(ctx
, ASE_MIPS3D
);
19373 check_cp1_enabled(ctx
);
19374 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19375 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19376 (rt
>> 2) & 0x7, imm
<< 2);
19383 check_cp1_enabled(ctx
);
19384 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19390 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19391 check_cp1_enabled(ctx
);
19392 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19394 case R6_OPC_CMP_AF_S
:
19395 case R6_OPC_CMP_UN_S
:
19396 case R6_OPC_CMP_EQ_S
:
19397 case R6_OPC_CMP_UEQ_S
:
19398 case R6_OPC_CMP_LT_S
:
19399 case R6_OPC_CMP_ULT_S
:
19400 case R6_OPC_CMP_LE_S
:
19401 case R6_OPC_CMP_ULE_S
:
19402 case R6_OPC_CMP_SAF_S
:
19403 case R6_OPC_CMP_SUN_S
:
19404 case R6_OPC_CMP_SEQ_S
:
19405 case R6_OPC_CMP_SEUQ_S
:
19406 case R6_OPC_CMP_SLT_S
:
19407 case R6_OPC_CMP_SULT_S
:
19408 case R6_OPC_CMP_SLE_S
:
19409 case R6_OPC_CMP_SULE_S
:
19410 case R6_OPC_CMP_OR_S
:
19411 case R6_OPC_CMP_UNE_S
:
19412 case R6_OPC_CMP_NE_S
:
19413 case R6_OPC_CMP_SOR_S
:
19414 case R6_OPC_CMP_SUNE_S
:
19415 case R6_OPC_CMP_SNE_S
:
19416 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19418 case R6_OPC_CMP_AF_D
:
19419 case R6_OPC_CMP_UN_D
:
19420 case R6_OPC_CMP_EQ_D
:
19421 case R6_OPC_CMP_UEQ_D
:
19422 case R6_OPC_CMP_LT_D
:
19423 case R6_OPC_CMP_ULT_D
:
19424 case R6_OPC_CMP_LE_D
:
19425 case R6_OPC_CMP_ULE_D
:
19426 case R6_OPC_CMP_SAF_D
:
19427 case R6_OPC_CMP_SUN_D
:
19428 case R6_OPC_CMP_SEQ_D
:
19429 case R6_OPC_CMP_SEUQ_D
:
19430 case R6_OPC_CMP_SLT_D
:
19431 case R6_OPC_CMP_SULT_D
:
19432 case R6_OPC_CMP_SLE_D
:
19433 case R6_OPC_CMP_SULE_D
:
19434 case R6_OPC_CMP_OR_D
:
19435 case R6_OPC_CMP_UNE_D
:
19436 case R6_OPC_CMP_NE_D
:
19437 case R6_OPC_CMP_SOR_D
:
19438 case R6_OPC_CMP_SUNE_D
:
19439 case R6_OPC_CMP_SNE_D
:
19440 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19443 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19444 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19449 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19464 check_insn(ctx
, ASE_MSA
);
19465 gen_msa_branch(env
, ctx
, op1
);
19469 generate_exception_end(ctx
, EXCP_RI
);
19474 /* Compact branches [R6] and COP2 [non-R6] */
19475 case OPC_BC
: /* OPC_LWC2 */
19476 case OPC_BALC
: /* OPC_SWC2 */
19477 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19478 /* OPC_BC, OPC_BALC */
19479 gen_compute_compact_branch(ctx
, op
, 0, 0,
19480 sextract32(ctx
->opcode
<< 2, 0, 28));
19482 /* OPC_LWC2, OPC_SWC2 */
19483 /* COP2: Not implemented. */
19484 generate_exception_err(ctx
, EXCP_CpU
, 2);
19487 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
19488 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
19489 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19491 /* OPC_BEQZC, OPC_BNEZC */
19492 gen_compute_compact_branch(ctx
, op
, rs
, 0,
19493 sextract32(ctx
->opcode
<< 2, 0, 23));
19495 /* OPC_JIC, OPC_JIALC */
19496 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
19499 /* OPC_LWC2, OPC_SWC2 */
19500 /* COP2: Not implemented. */
19501 generate_exception_err(ctx
, EXCP_CpU
, 2);
19505 check_insn(ctx
, INSN_LOONGSON2F
);
19506 /* Note that these instructions use different fields. */
19507 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
19511 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19512 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19513 check_cp1_enabled(ctx
);
19514 op1
= MASK_CP3(ctx
->opcode
);
19518 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19524 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19525 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
19528 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19529 /* Treat as NOP. */
19532 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19546 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19547 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
19551 generate_exception_end(ctx
, EXCP_RI
);
19555 generate_exception_err(ctx
, EXCP_CpU
, 1);
19559 #if defined(TARGET_MIPS64)
19560 /* MIPS64 opcodes */
19561 case OPC_LDL
... OPC_LDR
:
19563 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19567 check_insn(ctx
, ISA_MIPS3
);
19568 check_mips_64(ctx
);
19569 gen_ld(ctx
, op
, rt
, rs
, imm
);
19571 case OPC_SDL
... OPC_SDR
:
19572 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19575 check_insn(ctx
, ISA_MIPS3
);
19576 check_mips_64(ctx
);
19577 gen_st(ctx
, op
, rt
, rs
, imm
);
19580 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19581 check_insn(ctx
, ISA_MIPS3
);
19582 check_mips_64(ctx
);
19583 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19585 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
19586 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19587 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
19588 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19591 check_insn(ctx
, ISA_MIPS3
);
19592 check_mips_64(ctx
);
19593 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19597 check_insn(ctx
, ISA_MIPS3
);
19598 check_mips_64(ctx
);
19599 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19602 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
19603 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19604 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19606 MIPS_INVAL("major opcode");
19607 generate_exception_end(ctx
, EXCP_RI
);
19611 case OPC_DAUI
: /* OPC_JALX */
19612 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19613 #if defined(TARGET_MIPS64)
19615 check_mips_64(ctx
);
19617 generate_exception(ctx
, EXCP_RI
);
19618 } else if (rt
!= 0) {
19619 TCGv t0
= tcg_temp_new();
19620 gen_load_gpr(t0
, rs
);
19621 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
19625 generate_exception_end(ctx
, EXCP_RI
);
19626 MIPS_INVAL("major opcode");
19630 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
19631 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19632 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19635 case OPC_MSA
: /* OPC_MDMX */
19636 /* MDMX: Not implemented. */
19640 check_insn(ctx
, ISA_MIPS32R6
);
19641 gen_pcrel(ctx
, ctx
->opcode
, ctx
->pc
, rs
);
19643 default: /* Invalid */
19644 MIPS_INVAL("major opcode");
19645 generate_exception_end(ctx
, EXCP_RI
);
19650 void gen_intermediate_code(CPUMIPSState
*env
, struct TranslationBlock
*tb
)
19652 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19653 CPUState
*cs
= CPU(cpu
);
19655 target_ulong pc_start
;
19656 target_ulong next_page_start
;
19663 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
19666 ctx
.singlestep_enabled
= cs
->singlestep_enabled
;
19667 ctx
.insn_flags
= env
->insn_flags
;
19668 ctx
.CP0_Config1
= env
->CP0_Config1
;
19670 ctx
.bstate
= BS_NONE
;
19672 ctx
.kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
19673 ctx
.rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
19674 ctx
.ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
19675 ctx
.bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
19676 ctx
.bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
19677 ctx
.PAMask
= env
->PAMask
;
19678 ctx
.mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
19679 ctx
.CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
19680 ctx
.cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
19681 /* Restore delay slot state from the tb context. */
19682 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
19683 ctx
.ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
19684 ctx
.ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
19685 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
19686 ctx
.vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
19687 restore_cpu_state(env
, &ctx
);
19688 #ifdef CONFIG_USER_ONLY
19689 ctx
.mem_idx
= MIPS_HFLAG_UM
;
19691 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
19693 ctx
.default_tcg_memop_mask
= (ctx
.insn_flags
& ISA_MIPS32R6
) ?
19694 MO_UNALN
: MO_ALIGN
;
19696 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
19697 if (max_insns
== 0) {
19698 max_insns
= CF_COUNT_MASK
;
19700 if (max_insns
> TCG_MAX_INSNS
) {
19701 max_insns
= TCG_MAX_INSNS
;
19704 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
19706 while (ctx
.bstate
== BS_NONE
) {
19707 tcg_gen_insn_start(ctx
.pc
, ctx
.hflags
& MIPS_HFLAG_BMASK
, ctx
.btarget
);
19710 if (unlikely(cpu_breakpoint_test(cs
, ctx
.pc
, BP_ANY
))) {
19711 save_cpu_state(&ctx
, 1);
19712 ctx
.bstate
= BS_BRANCH
;
19713 gen_helper_raise_exception_debug(cpu_env
);
19714 /* The address covered by the breakpoint must be included in
19715 [tb->pc, tb->pc + tb->size) in order to for it to be
19716 properly cleared -- thus we increment the PC here so that
19717 the logic setting tb->size below does the right thing. */
19719 goto done_generating
;
19722 if (num_insns
== max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
19726 is_slot
= ctx
.hflags
& MIPS_HFLAG_BMASK
;
19727 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
19728 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
19730 decode_opc(env
, &ctx
);
19731 } else if (ctx
.insn_flags
& ASE_MICROMIPS
) {
19732 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19733 insn_bytes
= decode_micromips_opc(env
, &ctx
);
19734 } else if (ctx
.insn_flags
& ASE_MIPS16
) {
19735 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19736 insn_bytes
= decode_mips16_opc(env
, &ctx
);
19738 generate_exception_end(&ctx
, EXCP_RI
);
19742 if (ctx
.hflags
& MIPS_HFLAG_BMASK
) {
19743 if (!(ctx
.hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
19744 MIPS_HFLAG_FBNSLOT
))) {
19745 /* force to generate branch as there is neither delay nor
19749 if ((ctx
.hflags
& MIPS_HFLAG_M16
) &&
19750 (ctx
.hflags
& MIPS_HFLAG_FBNSLOT
)) {
19751 /* Force to generate branch as microMIPS R6 doesn't restrict
19752 branches in the forbidden slot. */
19757 gen_branch(&ctx
, insn_bytes
);
19759 ctx
.pc
+= insn_bytes
;
19761 /* Execute a branch and its delay slot as a single instruction.
19762 This is what GDB expects and is consistent with what the
19763 hardware does (e.g. if a delay slot instruction faults, the
19764 reported PC is the PC of the branch). */
19765 if (cs
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0) {
19769 if (ctx
.pc
>= next_page_start
) {
19773 if (tcg_op_buf_full()) {
19777 if (num_insns
>= max_insns
)
19783 if (tb
->cflags
& CF_LAST_IO
) {
19786 if (cs
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
19787 save_cpu_state(&ctx
, ctx
.bstate
!= BS_EXCP
);
19788 gen_helper_raise_exception_debug(cpu_env
);
19790 switch (ctx
.bstate
) {
19792 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19795 save_cpu_state(&ctx
, 0);
19796 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19799 tcg_gen_exit_tb(0);
19807 gen_tb_end(tb
, num_insns
);
19809 tb
->size
= ctx
.pc
- pc_start
;
19810 tb
->icount
= num_insns
;
19814 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
19815 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
19816 log_target_disas(cs
, pc_start
, ctx
.pc
- pc_start
, 0);
19822 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
19826 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
19828 #define printfpr(fp) \
19831 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19832 " fd:%13g fs:%13g psu: %13g\n", \
19833 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
19834 (double)(fp)->fd, \
19835 (double)(fp)->fs[FP_ENDIAN_IDX], \
19836 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
19839 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
19840 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
19841 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19842 " fd:%13g fs:%13g psu:%13g\n", \
19843 tmp.w[FP_ENDIAN_IDX], tmp.d, \
19845 (double)tmp.fs[FP_ENDIAN_IDX], \
19846 (double)tmp.fs[!FP_ENDIAN_IDX]); \
19851 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
19852 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
19853 get_float_exception_flags(&env
->active_fpu
.fp_status
));
19854 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
19855 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
19856 printfpr(&env
->active_fpu
.fpr
[i
]);
19862 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
19865 MIPSCPU
*cpu
= MIPS_CPU(cs
);
19866 CPUMIPSState
*env
= &cpu
->env
;
19869 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
19870 " LO=0x" TARGET_FMT_lx
" ds %04x "
19871 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
19872 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
19873 env
->hflags
, env
->btarget
, env
->bcond
);
19874 for (i
= 0; i
< 32; i
++) {
19876 cpu_fprintf(f
, "GPR%02d:", i
);
19877 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
19879 cpu_fprintf(f
, "\n");
19882 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
19883 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
19884 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
19886 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
19887 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
19888 env
->CP0_Config2
, env
->CP0_Config3
);
19889 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
19890 env
->CP0_Config4
, env
->CP0_Config5
);
19891 if (env
->hflags
& MIPS_HFLAG_FPU
)
19892 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
19895 void mips_tcg_init(void)
19900 /* Initialize various static tables. */
19904 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
19906 TCGV_UNUSED(cpu_gpr
[0]);
19907 for (i
= 1; i
< 32; i
++)
19908 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
19909 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
19912 for (i
= 0; i
< 32; i
++) {
19913 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
19915 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
19916 /* The scalar floating-point unit (FPU) registers are mapped on
19917 * the MSA vector registers. */
19918 fpu_f64
[i
] = msa_wr_d
[i
* 2];
19919 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
19920 msa_wr_d
[i
* 2 + 1] =
19921 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
19924 cpu_PC
= tcg_global_mem_new(cpu_env
,
19925 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
19926 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
19927 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
19928 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
19930 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
19931 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
19934 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
19935 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
19937 bcond
= tcg_global_mem_new(cpu_env
,
19938 offsetof(CPUMIPSState
, bcond
), "bcond");
19939 btarget
= tcg_global_mem_new(cpu_env
,
19940 offsetof(CPUMIPSState
, btarget
), "btarget");
19941 hflags
= tcg_global_mem_new_i32(cpu_env
,
19942 offsetof(CPUMIPSState
, hflags
), "hflags");
19944 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
19945 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
19947 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
19948 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
19954 #include "translate_init.c"
19956 MIPSCPU
*cpu_mips_init(const char *cpu_model
)
19960 const mips_def_t
*def
;
19962 def
= cpu_mips_find_by_name(cpu_model
);
19965 cpu
= MIPS_CPU(object_new(TYPE_MIPS_CPU
));
19967 env
->cpu_model
= def
;
19969 #ifndef CONFIG_USER_ONLY
19970 mmu_init(env
, def
);
19972 fpu_init(env
, def
);
19973 mvp_init(env
, def
);
19975 object_property_set_bool(OBJECT(cpu
), true, "realized", NULL
);
19980 void cpu_state_reset(CPUMIPSState
*env
)
19982 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19983 CPUState
*cs
= CPU(cpu
);
19985 /* Reset registers to their default values */
19986 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
19987 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
19988 #ifdef TARGET_WORDS_BIGENDIAN
19989 env
->CP0_Config0
|= (1 << CP0C0_BE
);
19991 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
19992 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
19993 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
19994 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
19995 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
19996 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
19997 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
19998 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
19999 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
20000 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
20001 << env
->cpu_model
->CP0_LLAddr_shift
;
20002 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
20003 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
20004 env
->CCRes
= env
->cpu_model
->CCRes
;
20005 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
20006 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
20007 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
20008 env
->current_tc
= 0;
20009 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
20010 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
20011 #if defined(TARGET_MIPS64)
20012 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
20013 env
->SEGMask
|= 3ULL << 62;
20016 env
->PABITS
= env
->cpu_model
->PABITS
;
20017 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
20018 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
20019 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
20020 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
20021 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
20022 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
20023 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
20024 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
20025 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
20026 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
20027 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
20028 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
20029 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
20030 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
20031 env
->msair
= env
->cpu_model
->MSAIR
;
20032 env
->insn_flags
= env
->cpu_model
->insn_flags
;
20034 #if defined(CONFIG_USER_ONLY)
20035 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
20036 # ifdef TARGET_MIPS64
20037 /* Enable 64-bit register mode. */
20038 env
->CP0_Status
|= (1 << CP0St_PX
);
20040 # ifdef TARGET_ABI_MIPSN64
20041 /* Enable 64-bit address mode. */
20042 env
->CP0_Status
|= (1 << CP0St_UX
);
20044 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
20045 hardware registers. */
20046 env
->CP0_HWREna
|= 0x0000000F;
20047 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
20048 env
->CP0_Status
|= (1 << CP0St_CU1
);
20050 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
20051 env
->CP0_Status
|= (1 << CP0St_MX
);
20053 # if defined(TARGET_MIPS64)
20054 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
20055 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
20056 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
20057 env
->CP0_Status
|= (1 << CP0St_FR
);
20061 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
20062 /* If the exception was raised from a delay slot,
20063 come back to the jump. */
20064 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
20065 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
20067 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
20069 env
->active_tc
.PC
= (int32_t)0xBFC00000;
20070 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
20071 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
20072 env
->CP0_Wired
= 0;
20073 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
20074 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
20075 if (kvm_enabled()) {
20076 env
->CP0_EBase
|= 0x40000000;
20078 env
->CP0_EBase
|= 0x80000000;
20080 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
20081 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
20083 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
20084 /* vectored interrupts not implemented, timer on int 7,
20085 no performance counters. */
20086 env
->CP0_IntCtl
= 0xe0000000;
20090 for (i
= 0; i
< 7; i
++) {
20091 env
->CP0_WatchLo
[i
] = 0;
20092 env
->CP0_WatchHi
[i
] = 0x80000000;
20094 env
->CP0_WatchLo
[7] = 0;
20095 env
->CP0_WatchHi
[7] = 0;
20097 /* Count register increments in debug mode, EJTAG version 1 */
20098 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20100 cpu_mips_store_count(env
, 1);
20102 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20105 /* Only TC0 on VPE 0 starts as active. */
20106 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20107 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20108 env
->tcs
[i
].CP0_TCHalt
= 1;
20110 env
->active_tc
.CP0_TCHalt
= 1;
20113 if (cs
->cpu_index
== 0) {
20114 /* VPE0 starts up enabled. */
20115 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20116 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20118 /* TC0 starts up unhalted. */
20120 env
->active_tc
.CP0_TCHalt
= 0;
20121 env
->tcs
[0].CP0_TCHalt
= 0;
20122 /* With thread 0 active. */
20123 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20124 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20128 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20129 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20130 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20131 env
->CP0_Status
|= (1 << CP0St_FR
);
20135 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20139 compute_hflags(env
);
20140 restore_rounding_mode(env
);
20141 restore_flush_mode(env
);
20142 restore_pamask(env
);
20143 cs
->exception_index
= EXCP_NONE
;
20145 if (semihosting_get_argc()) {
20146 /* UHI interface can be used to obtain argc and argv */
20147 env
->active_tc
.gpr
[4] = -1;
20151 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20152 target_ulong
*data
)
20154 env
->active_tc
.PC
= data
[0];
20155 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20156 env
->hflags
|= data
[1];
20157 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20158 case MIPS_HFLAG_BR
:
20160 case MIPS_HFLAG_BC
:
20161 case MIPS_HFLAG_BL
:
20163 env
->btarget
= data
[2];