/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "exec/semihost.h"
#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"
42 #define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
45 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL
= (0x00 << 26),
50 OPC_REGIMM
= (0x01 << 26),
51 OPC_CP0
= (0x10 << 26),
52 OPC_CP1
= (0x11 << 26),
53 OPC_CP2
= (0x12 << 26),
54 OPC_CP3
= (0x13 << 26),
55 OPC_SPECIAL2
= (0x1C << 26),
56 OPC_SPECIAL3
= (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI
= (0x08 << 26),
59 OPC_ADDIU
= (0x09 << 26),
60 OPC_SLTI
= (0x0A << 26),
61 OPC_SLTIU
= (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI
= (0x0C << 26),
64 OPC_ORI
= (0x0D << 26),
65 OPC_XORI
= (0x0E << 26),
66 OPC_LUI
= (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI
= (0x18 << 26),
69 OPC_DADDIU
= (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL
= (0x03 << 26),
73 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL
= (0x14 << 26),
75 OPC_BNE
= (0x05 << 26),
76 OPC_BNEL
= (0x15 << 26),
77 OPC_BLEZ
= (0x06 << 26),
78 OPC_BLEZL
= (0x16 << 26),
79 OPC_BGTZ
= (0x07 << 26),
80 OPC_BGTZL
= (0x17 << 26),
81 OPC_JALX
= (0x1D << 26),
82 OPC_DAUI
= (0x1D << 26),
84 OPC_LDL
= (0x1A << 26),
85 OPC_LDR
= (0x1B << 26),
86 OPC_LB
= (0x20 << 26),
87 OPC_LH
= (0x21 << 26),
88 OPC_LWL
= (0x22 << 26),
89 OPC_LW
= (0x23 << 26),
90 OPC_LWPC
= OPC_LW
| 0x5,
91 OPC_LBU
= (0x24 << 26),
92 OPC_LHU
= (0x25 << 26),
93 OPC_LWR
= (0x26 << 26),
94 OPC_LWU
= (0x27 << 26),
95 OPC_SB
= (0x28 << 26),
96 OPC_SH
= (0x29 << 26),
97 OPC_SWL
= (0x2A << 26),
98 OPC_SW
= (0x2B << 26),
99 OPC_SDL
= (0x2C << 26),
100 OPC_SDR
= (0x2D << 26),
101 OPC_SWR
= (0x2E << 26),
102 OPC_LL
= (0x30 << 26),
103 OPC_LLD
= (0x34 << 26),
104 OPC_LD
= (0x37 << 26),
105 OPC_LDPC
= OPC_LD
| 0x5,
106 OPC_SC
= (0x38 << 26),
107 OPC_SCD
= (0x3C << 26),
108 OPC_SD
= (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1
= (0x31 << 26),
111 OPC_LWC2
= (0x32 << 26),
112 OPC_LDC1
= (0x35 << 26),
113 OPC_LDC2
= (0x36 << 26),
114 OPC_SWC1
= (0x39 << 26),
115 OPC_SWC2
= (0x3A << 26),
116 OPC_SDC1
= (0x3D << 26),
117 OPC_SDC2
= (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC
= (0x06 << 26),
120 OPC_BGEZALC
= (0x06 << 26),
121 OPC_BGEUC
= (0x06 << 26),
122 OPC_BGTZALC
= (0x07 << 26),
123 OPC_BLTZALC
= (0x07 << 26),
124 OPC_BLTUC
= (0x07 << 26),
125 OPC_BOVC
= (0x08 << 26),
126 OPC_BEQZALC
= (0x08 << 26),
127 OPC_BEQC
= (0x08 << 26),
128 OPC_BLEZC
= (0x16 << 26),
129 OPC_BGEZC
= (0x16 << 26),
130 OPC_BGEC
= (0x16 << 26),
131 OPC_BGTZC
= (0x17 << 26),
132 OPC_BLTZC
= (0x17 << 26),
133 OPC_BLTC
= (0x17 << 26),
134 OPC_BNVC
= (0x18 << 26),
135 OPC_BNEZALC
= (0x18 << 26),
136 OPC_BNEC
= (0x18 << 26),
137 OPC_BC
= (0x32 << 26),
138 OPC_BEQZC
= (0x36 << 26),
139 OPC_JIC
= (0x36 << 26),
140 OPC_BALC
= (0x3A << 26),
141 OPC_BNEZC
= (0x3E << 26),
142 OPC_JIALC
= (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX
= (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE
= (0x2F << 26),
149 OPC_PREF
= (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL
= (0x3B << 26),
154 /* PC-relative address computation / loads */
155 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
156 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
160 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
161 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
165 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
168 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
171 /* MIPS special opcodes */
172 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
176 OPC_SLL
= 0x00 | OPC_SPECIAL
,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
181 OPC_ROTR
= OPC_SRL
| (1 << 21),
182 OPC_SRA
= 0x03 | OPC_SPECIAL
,
183 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
184 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
185 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
186 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
187 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
188 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
189 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
190 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
191 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
192 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
193 OPC_DROTR
= OPC_DSRL
| (1 << 21),
194 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
195 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
196 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
197 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
198 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
199 /* Multiplication / division */
200 OPC_MULT
= 0x18 | OPC_SPECIAL
,
201 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
202 OPC_DIV
= 0x1A | OPC_SPECIAL
,
203 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
204 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
205 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
206 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
207 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD
= 0x20 | OPC_SPECIAL
,
211 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
212 OPC_SUB
= 0x22 | OPC_SPECIAL
,
213 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
214 OPC_AND
= 0x24 | OPC_SPECIAL
,
215 OPC_OR
= 0x25 | OPC_SPECIAL
,
216 OPC_XOR
= 0x26 | OPC_SPECIAL
,
217 OPC_NOR
= 0x27 | OPC_SPECIAL
,
218 OPC_SLT
= 0x2A | OPC_SPECIAL
,
219 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
220 OPC_DADD
= 0x2C | OPC_SPECIAL
,
221 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
222 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
223 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
225 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
226 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
228 OPC_TGE
= 0x30 | OPC_SPECIAL
,
229 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
230 OPC_TLT
= 0x32 | OPC_SPECIAL
,
231 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
232 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
233 OPC_TNE
= 0x36 | OPC_SPECIAL
,
234 /* HI / LO registers load & stores */
235 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
236 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
237 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
238 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
239 /* Conditional moves */
240 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
241 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
243 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
244 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
246 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
249 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
250 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
251 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
252 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
253 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
255 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
256 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
257 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
258 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
267 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
268 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
269 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
270 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
271 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
272 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
273 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
275 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
276 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
277 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
278 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
279 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
280 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
281 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
282 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
284 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
285 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
286 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
287 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
288 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
290 OPC_LSA
= 0x05 | OPC_SPECIAL
,
291 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
294 /* Multiplication variants of the vr54xx. */
295 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
298 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
301 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
302 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
303 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
304 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
305 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
306 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
307 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
308 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
309 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
310 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
311 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
314 /* REGIMM (rt field) opcodes */
315 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
318 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
319 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
320 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
321 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
322 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
323 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
324 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
325 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
326 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
327 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
328 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
329 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
330 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
331 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
332 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
333 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
335 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
336 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
339 /* Special2 opcodes */
340 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
343 /* Multiply & xxx operations */
344 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
345 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
346 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
347 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
348 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
350 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
351 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
352 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
353 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
354 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
355 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
356 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
357 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
358 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
359 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
360 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
361 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
363 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
364 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
365 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
366 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
368 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
371 /* Special3 opcodes */
372 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
375 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
376 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
377 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
378 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
379 OPC_INS
= 0x04 | OPC_SPECIAL3
,
380 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
381 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
382 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
383 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
384 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
385 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
386 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
387 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
390 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
391 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
392 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
393 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
394 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
395 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
396 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
397 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
398 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
399 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
400 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
401 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
404 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
407 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
408 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
413 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
416 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
421 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
424 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
427 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
430 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
433 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
434 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
435 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
436 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
437 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
438 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
439 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
440 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
441 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
442 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
443 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
444 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
445 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
446 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
447 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
448 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
451 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
452 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
453 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
454 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
455 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
456 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
464 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
465 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
466 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
467 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
468 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
469 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
470 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
474 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
477 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
478 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
479 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
480 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
481 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
487 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
490 /* MIPS DSP REGIMM opcodes */
492 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
493 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
496 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
500 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
501 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
502 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
505 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
507 /* MIPS DSP Arithmetic Sub-class */
508 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
509 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
515 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
522 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
526 /* MIPS DSP Multiply Sub-class insns */
527 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
535 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
536 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
538 /* MIPS DSP Arithmetic Sub-class */
539 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
540 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
551 /* MIPS DSP Multiply Sub-class insns */
552 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
558 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
560 /* MIPS DSP Arithmetic Sub-class */
561 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
562 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
574 /* DSP Bit/Manipulation Sub-class */
575 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
582 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
584 /* MIPS DSP Arithmetic Sub-class */
585 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
586 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 /* DSP Compare-Pick Sub-class */
593 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
610 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
612 /* MIPS DSP GPR-Based Shift Sub-class */
613 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
614 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
637 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
639 /* MIPS DSP Multiply Sub-class insns */
640 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
641 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
664 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
666 /* DSP Bit/Manipulation Sub-class */
667 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
670 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Append Sub-class */
673 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
674 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
675 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
678 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
680 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
681 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
682 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
693 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
695 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
696 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
697 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
700 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
702 /* MIPS DSP Arithmetic Sub-class */
703 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
704 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
720 /* DSP Bit/Manipulation Sub-class */
721 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
729 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
731 /* MIPS DSP Multiply Sub-class insns */
732 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
733 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
737 /* MIPS DSP Arithmetic Sub-class */
738 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
749 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
761 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
763 /* DSP Compare-Pick Sub-class */
764 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
765 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
783 /* MIPS DSP Arithmetic Sub-class */
784 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
794 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
796 /* DSP Append Sub-class */
797 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
798 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
800 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
803 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
805 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
806 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
807 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
829 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
831 /* DSP Bit/Manipulation Sub-class */
832 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
835 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
837 /* MIPS DSP Multiply Sub-class insns */
838 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
839 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
866 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
868 /* MIPS DSP GPR-Based Shift Sub-class */
869 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
870 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
897 /* Coprocessor 0 (rs field) */
898 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
901 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
902 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
903 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
904 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
905 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
906 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
907 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
908 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
909 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
910 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
911 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
912 OPC_C0
= (0x10 << 21) | OPC_CP0
,
913 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
914 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
915 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
916 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
917 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
918 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
919 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
920 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
921 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
922 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
923 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
924 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
925 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
926 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
927 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
931 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
934 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
935 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
937 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
938 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
939 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
941 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
944 /* Coprocessor 0 (with rs == C0) */
945 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
948 OPC_TLBR
= 0x01 | OPC_C0
,
949 OPC_TLBWI
= 0x02 | OPC_C0
,
950 OPC_TLBINV
= 0x03 | OPC_C0
,
951 OPC_TLBINVF
= 0x04 | OPC_C0
,
952 OPC_TLBWR
= 0x06 | OPC_C0
,
953 OPC_TLBP
= 0x08 | OPC_C0
,
954 OPC_RFE
= 0x10 | OPC_C0
,
955 OPC_ERET
= 0x18 | OPC_C0
,
956 OPC_DERET
= 0x1F | OPC_C0
,
957 OPC_WAIT
= 0x20 | OPC_C0
,
/* Coprocessor 1 (rs field) */
/* Outer parentheses added for safe expansion in any expression context. */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16, /* single fp */
    FMT_D  = 17, /* double fp */
    FMT_E  = 18, /* extended fp */
    FMT_Q  = 19, /* quad fp */
    FMT_W  = 20, /* 32-bit fixed */
    FMT_L  = 21, /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};
977 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
978 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
979 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
980 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
981 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
982 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
983 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
984 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
985 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
986 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
987 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
988 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
989 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
990 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
991 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
992 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
993 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
994 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
995 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
996 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
997 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
998 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
999 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1000 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1001 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1002 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1003 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1004 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1005 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1006 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
1009 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
1010 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
1013 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1014 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1015 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1016 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1020 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1021 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1025 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1026 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1029 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1032 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1033 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1034 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1035 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1036 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1037 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1038 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1039 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1040 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1041 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1042 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1045 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1048 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1049 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1058 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1067 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1070 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1071 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1072 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1073 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1075 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1076 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1085 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1092 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1099 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1106 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1113 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1120 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1127 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1134 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1142 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1145 OPC_LWXC1
= 0x00 | OPC_CP3
,
1146 OPC_LDXC1
= 0x01 | OPC_CP3
,
1147 OPC_LUXC1
= 0x05 | OPC_CP3
,
1148 OPC_SWXC1
= 0x08 | OPC_CP3
,
1149 OPC_SDXC1
= 0x09 | OPC_CP3
,
1150 OPC_SUXC1
= 0x0D | OPC_CP3
,
1151 OPC_PREFX
= 0x0F | OPC_CP3
,
1152 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1153 OPC_MADD_S
= 0x20 | OPC_CP3
,
1154 OPC_MADD_D
= 0x21 | OPC_CP3
,
1155 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1156 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1157 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1158 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1159 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1160 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1161 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1162 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1163 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1164 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1168 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1170 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1171 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1172 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1173 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1174 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1175 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1176 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1177 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1178 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1179 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1180 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1181 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1182 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1183 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1184 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1185 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1186 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1187 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1188 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1189 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1190 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1192 /* MI10 instruction */
1193 OPC_LD_B
= (0x20) | OPC_MSA
,
1194 OPC_LD_H
= (0x21) | OPC_MSA
,
1195 OPC_LD_W
= (0x22) | OPC_MSA
,
1196 OPC_LD_D
= (0x23) | OPC_MSA
,
1197 OPC_ST_B
= (0x24) | OPC_MSA
,
1198 OPC_ST_H
= (0x25) | OPC_MSA
,
1199 OPC_ST_W
= (0x26) | OPC_MSA
,
1200 OPC_ST_D
= (0x27) | OPC_MSA
,
1204 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1205 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1206 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1207 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1208 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1209 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1210 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1211 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1212 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1213 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1214 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1215 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1216 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1218 /* I8 instruction */
1219 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1220 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1221 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1222 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1225 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1226 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1228 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1230 /* VEC/2R/2RF instruction */
1231 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1232 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1233 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1234 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1235 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1236 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1237 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1239 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1242 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1243 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1244 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1245 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1246 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1248 /* 2RF instruction df(bit 16) = _w, _d */
1249 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1250 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1252 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1253 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1254 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1255 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1256 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1257 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1259 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1260 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1261 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1263 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1266 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1267 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1268 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1269 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1270 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1271 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1272 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1273 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1274 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1275 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1276 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1277 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1278 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1279 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1280 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1281 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1282 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1283 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1284 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1285 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1286 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1287 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1288 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1289 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1290 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1291 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1292 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1293 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1294 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1295 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1296 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1297 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1298 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1299 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1300 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1301 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1302 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1303 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1304 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1305 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1306 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1307 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1308 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1309 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1310 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1311 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1312 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1313 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1314 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1315 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1316 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1317 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1318 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1319 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1320 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1321 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1322 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1323 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1324 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1325 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1326 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1327 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1328 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1329 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1331 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1332 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1333 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1334 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1335 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1336 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1337 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1338 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1339 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 /* 3RF instruction _df(bit 21) = _w, _d */
1343 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1344 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1345 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1346 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1347 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1348 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1362 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1363 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1364 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1365 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1366 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1367 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1368 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1369 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1370 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1373 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1374 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1375 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1385 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1386 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1387 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1388 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1389 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1390 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1391 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1392 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1393 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1394 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1395 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1403 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1404 * ============================================
1407 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
1408 * instructions set. It is designed to fit the needs of signal, graphical and
1409 * video processing applications. MXU instruction set is used in Xburst family
1410 * of microprocessors by Ingenic.
1412 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1413 * the control register.
1416 * The notation used in MXU assembler mnemonics
1417 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1419 * Register operands:
1421 * XRa, XRb, XRc, XRd - MXU registers
1422 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1424 * Non-register operands:
1426 * aptn1 - 1-bit accumulate add/subtract pattern
1427 * aptn2 - 2-bit accumulate add/subtract pattern
1428 * eptn2 - 2-bit execute add/subtract pattern
1429 * optn2 - 2-bit operand pattern
1430 * optn3 - 3-bit operand pattern
1431 * sft4 - 4-bit shift amount
1432 * strd2 - 2-bit stride amount
1436 * Level of parallelism: Operand size:
1437 * S - single operation at a time 32 - word
1438 * D - two operations in parallel 16 - half word
1439 * Q - four operations in parallel 8 - byte
1443 * ADD - Add or subtract
1444 * ADDC - Add with carry-in
1446 * ASUM - Sum together then accumulate (add or subtract)
1447 * ASUMC - Sum together then accumulate (add or subtract) with carry-in
1448 * AVG - Average between 2 operands
1449 * ABD - Absolute difference
1451 * AND - Logical bitwise 'and' operation
1453 * EXTR - Extract bits
1454 * I2M - Move from GPR register to MXU register
1455 * LDD - Load data from memory to XRF
1456 * LDI - Load data from memory to XRF (and increase the address base)
1457 * LUI - Load unsigned immediate
1459 * MULU - Unsigned multiply
1460 * MADD - 64-bit operand add 32x32 product
1461 * MSUB - 64-bit operand subtract 32x32 product
1462 * MAC - Multiply and accumulate (add or subtract)
1463 * MAD - Multiply and add or subtract
1464 * MAX - Maximum between 2 operands
1465 * MIN - Minimum between 2 operands
1466 * M2I - Move from MXU register to GPR register
1467 * MOVZ - Move if zero
1468 * MOVN - Move if non-zero
1469 * NOR - Logical bitwise 'nor' operation
1470 * OR - Logical bitwise 'or' operation
1471 * STD - Store data from XRF to memory
1472 * SDI - Store data from XRF to memory (and increase the address base)
1473 * SLT - Set of less than comparison
1474 * SAD - Sum of absolute differences
1475 * SLL - Logical shift left
1476 * SLR - Logical shift right
1477 * SAR - Arithmetic shift right
 * SCOP - Calculate x's scope (-1, means x<0; 0, means x==0; 1, means x>0)
1481 * XOR - Logical bitwise 'exclusive or' operation
1485 * E - Expand results
1486 * F - Fixed point multiplication
1487 * L - Low part result
1488 * R - Doing rounding
1489 * V - Variable instead of immediate
1490 * W - Combine above L and V
1493 * The list of MXU instructions grouped by functionality
1494 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1496 * Load/Store instructions Multiplication instructions
1497 * ----------------------- ---------------------------
1499 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1500 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1501 * S32LDDV XRa, Rb, rc, strd2 S32MSUB XRa, XRd, Rs, Rt
1502 * S32STDV XRa, Rb, rc, strd2 S32MSUBU XRa, XRd, Rs, Rt
1503 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1504 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1505 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1506 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1507 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1508 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1509 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1510 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1511 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1512 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1513 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1514 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1515 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1516 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1517 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1518 * S16SDI XRa, Rb, s10, eptn2
1519 * S8LDD XRa, Rb, s8, eptn3
1520 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1521 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1522 * S8SDI XRa, Rb, s8, eptn3
1523 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1524 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1525 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1526 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1527 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1528 * S32CPS XRa, XRb, XRc
1529 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1530 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1531 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1532 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1533 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1534 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1535 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1536 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1537 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1538 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1539 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1540 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1541 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1542 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1543 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1544 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1545 * Q8SLT XRa, XRb, XRc
1546 * Q8SLTU XRa, XRb, XRc
1547 * Q8MOVZ XRa, XRb, XRc Shift instructions
1548 * Q8MOVN XRa, XRb, XRc ------------------
1550 * D32SLL XRa, XRb, XRc, XRd, sft4
1551 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1552 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1553 * D32SARL XRa, XRb, XRc, sft4
1554 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1555 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1556 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1557 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1558 * Q16SLL XRa, XRb, XRc, XRd, sft4
1559 * Q16SLR XRa, XRb, XRc, XRd, sft4
1560 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1561 * ------------------------- Q16SLLV XRa, XRb, Rb
1562 * Q16SLRV XRa, XRb, Rb
1563 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1564 * S32ALN XRa, XRb, XRc, Rb
1565 * S32ALNI XRa, XRb, XRc, s3
1566 * S32LUI XRa, s8, optn3 Move instructions
1567 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1568 * S32EXTRV XRa, XRb, Rs, Rt
1569 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1570 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1573 * The opcode organization of MXU instructions
1574 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1576 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1577 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1578 * other bits up to the instruction level is as follows:
1583 * ┌─ 000000 ─ OPC_MXU_S32MADD
1584 * ├─ 000001 ─ OPC_MXU_S32MADDU
1585 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1588 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1589 * │ ├─ 001 ─ OPC_MXU_S32MIN
1590 * │ ├─ 010 ─ OPC_MXU_D16MAX
1591 * │ ├─ 011 ─ OPC_MXU_D16MIN
1592 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1593 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1594 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1595 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1596 * ├─ 000100 ─ OPC_MXU_S32MSUB
1597 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1598 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1599 * │ ├─ 001 ─ OPC_MXU_D16SLT
1600 * │ ├─ 010 ─ OPC_MXU_D16AVG
1601 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1602 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1603 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1604 * │ └─ 111 ─ OPC_MXU_Q8ADD
1607 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1608 * │ ├─ 010 ─ OPC_MXU_D16CPS
1609 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1610 * │ └─ 110 ─ OPC_MXU_Q16SAT
1611 * ├─ 001000 ─ OPC_MXU_D16MUL
1613 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1614 * │ └─ 01 ─ OPC_MXU_D16MULE
1615 * ├─ 001010 ─ OPC_MXU_D16MAC
1616 * ├─ 001011 ─ OPC_MXU_D16MACF
1617 * ├─ 001100 ─ OPC_MXU_D16MADL
1618 * ├─ 001101 ─ OPC_MXU_S16MAD
1619 * ├─ 001110 ─ OPC_MXU_Q16ADD
1620 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1621 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1622 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1625 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1626 * │ └─ 1 ─ OPC_MXU_S32STDR
1629 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1630 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1633 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1634 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1637 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1638 * │ └─ 1 ─ OPC_MXU_S32LDIR
1641 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1642 * │ └─ 1 ─ OPC_MXU_S32SDIR
1645 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1646 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1649 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1650 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1651 * ├─ 011000 ─ OPC_MXU_D32ADD
1653 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1654 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1655 * │ └─ 10 ─ OPC_MXU_D32ASUM
1656 * ├─ 011010 ─ <not assigned>
1658 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1659 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1660 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1663 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1664 * │ ├─ 01 ─ OPC_MXU_D8SUM
1665 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1666 * ├─ 011110 ─ <not assigned>
1667 * ├─ 011111 ─ <not assigned>
1668 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1669 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1670 * ├─ 100010 ─ OPC_MXU_S8LDD
1671 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1672 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
1673 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 01 ─ OPC_MXU_S32MULU
1674 * │ ├─ 10 ─ OPC_MXU_S32EXTR
1675 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 11 ─ OPC_MXU_S32EXTRV
1678 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1679 * │ ├─ 001 ─ OPC_MXU_S32ALN
1680 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1681 * │ ├─ 011 ─ OPC_MXU_S32LUI
1682 * │ ├─ 100 ─ OPC_MXU_S32NOR
1683 * │ ├─ 101 ─ OPC_MXU_S32AND
1684 * │ ├─ 110 ─ OPC_MXU_S32OR
1685 * │ └─ 111 ─ OPC_MXU_S32XOR
1688 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1689 * │ ├─ 001 ─ OPC_MXU_LXH
1690 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1691 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1692 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1693 * ├─ 101100 ─ OPC_MXU_S16LDI
1694 * ├─ 101101 ─ OPC_MXU_S16SDI
1695 * ├─ 101110 ─ OPC_MXU_S32M2I
1696 * ├─ 101111 ─ OPC_MXU_S32I2M
1697 * ├─ 110000 ─ OPC_MXU_D32SLL
1698 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1699 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1700 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1701 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 010 ─ OPC_MXU_D32SARV
1702 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 011 ─ OPC_MXU_Q16SLLV
1703 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1704 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 101 ─ OPC_MXU_Q16SARV
1706 * ├─ 110111 ─ OPC_MXU_Q16SAR
1708 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1709 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1712 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1713 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1714 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1715 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1716 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1717 * │ └─ 101 ─ OPC_MXU_S32MOVN
1720 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1721 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1722 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1723 * ├─ 111100 ─ OPC_MXU_Q8MADL
1724 * ├─ 111101 ─ OPC_MXU_S32SFL
1725 * ├─ 111110 ─ OPC_MXU_Q8SAD
1726 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1731 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1732 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
/*
 * MXU major opcodes, held in bits 5..0 of the instruction word
 * (see the decoding tree in the comment above).  Gaps are unassigned
 * encodings; 0x02 overlaps the non-MXU MUL and 0x3F overlaps SDBBP.
 */
enum {
    OPC_MXU_S32MADD  = 0x00,
    OPC_MXU_S32MADDU = 0x01,
    OPC__MXU_MUL     = 0x02,
    OPC_MXU__POOL00  = 0x03,
    OPC_MXU_S32MSUB  = 0x04,
    OPC_MXU_S32MSUBU = 0x05,
    OPC_MXU__POOL01  = 0x06,
    OPC_MXU__POOL02  = 0x07,
    OPC_MXU_D16MUL   = 0x08,
    OPC_MXU__POOL03  = 0x09,
    OPC_MXU_D16MAC   = 0x0A,
    OPC_MXU_D16MACF  = 0x0B,
    OPC_MXU_D16MADL  = 0x0C,
    OPC_MXU_S16MAD   = 0x0D,
    OPC_MXU_Q16ADD   = 0x0E,
    OPC_MXU_D16MACE  = 0x0F,
    OPC_MXU__POOL04  = 0x10,
    OPC_MXU__POOL05  = 0x11,
    OPC_MXU__POOL06  = 0x12,
    OPC_MXU__POOL07  = 0x13,
    OPC_MXU__POOL08  = 0x14,
    OPC_MXU__POOL09  = 0x15,
    OPC_MXU__POOL10  = 0x16,
    OPC_MXU__POOL11  = 0x17,
    OPC_MXU_D32ADD   = 0x18,
    OPC_MXU__POOL12  = 0x19,
    /* not assigned 0x1A */
    OPC_MXU__POOL13  = 0x1B,
    OPC_MXU__POOL14  = 0x1C,
    OPC_MXU_Q8ACCE   = 0x1D,
    /* not assigned 0x1E */
    /* not assigned 0x1F */
    /* not assigned 0x20 */
    /* not assigned 0x21 */
    OPC_MXU_S8LDD    = 0x22,
    OPC_MXU_S8STD    = 0x23,
    OPC_MXU_S8LDI    = 0x24,
    OPC_MXU_S8SDI    = 0x25,
    OPC_MXU__POOL15  = 0x26,
    OPC_MXU__POOL16  = 0x27,
    OPC_MXU__POOL17  = 0x28,
    /* not assigned 0x29 */
    OPC_MXU_S16LDD   = 0x2A,
    OPC_MXU_S16STD   = 0x2B,
    OPC_MXU_S16LDI   = 0x2C,
    OPC_MXU_S16SDI   = 0x2D,
    OPC_MXU_S32M2I   = 0x2E,
    OPC_MXU_S32I2M   = 0x2F,
    OPC_MXU_D32SLL   = 0x30,
    OPC_MXU_D32SLR   = 0x31,
    OPC_MXU_D32SARL  = 0x32,
    OPC_MXU_D32SAR   = 0x33,
    OPC_MXU_Q16SLL   = 0x34,
    OPC_MXU_Q16SLR   = 0x35,
    OPC_MXU__POOL18  = 0x36,
    OPC_MXU_Q16SAR   = 0x37,
    OPC_MXU__POOL19  = 0x38,
    OPC_MXU__POOL20  = 0x39,
    OPC_MXU__POOL21  = 0x3A,
    OPC_MXU_Q16SCOP  = 0x3B,
    OPC_MXU_Q8MADL   = 0x3C,
    OPC_MXU_S32SFL   = 0x3D,
    OPC_MXU_Q8SAD    = 0x3E,
    /* not assigned 0x3F */
};
/* Minor opcodes within OPC_MXU__POOL00. */
enum {
    OPC_MXU_S32MAX   = 0x00,
    OPC_MXU_S32MIN   = 0x01,
    OPC_MXU_D16MAX   = 0x02,
    OPC_MXU_D16MIN   = 0x03,
    OPC_MXU_Q8MAX    = 0x04,
    OPC_MXU_Q8MIN    = 0x05,
    OPC_MXU_Q8SLT    = 0x06,
    OPC_MXU_Q8SLTU   = 0x07,
};
/* Minor opcodes within OPC_MXU__POOL01 (0x06 is unassigned). */
enum {
    OPC_MXU_S32SLT   = 0x00,
    OPC_MXU_D16SLT   = 0x01,
    OPC_MXU_D16AVG   = 0x02,
    OPC_MXU_D16AVGR  = 0x03,
    OPC_MXU_Q8AVG    = 0x04,
    OPC_MXU_Q8AVGR   = 0x05,
    OPC_MXU_Q8ADD    = 0x07,
};
/* Minor opcodes within OPC_MXU__POOL02 (odd values are unassigned). */
enum {
    OPC_MXU_S32CPS   = 0x00,
    OPC_MXU_D16CPS   = 0x02,
    OPC_MXU_Q8ABD    = 0x04,
    OPC_MXU_Q16SAT   = 0x06,
};
/* Minor opcodes within OPC_MXU__POOL03. */
enum {
    OPC_MXU_D16MULF  = 0x00,
    OPC_MXU_D16MULE  = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL04. */
enum {
    OPC_MXU_S32LDD   = 0x00,
    OPC_MXU_S32LDDR  = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL05. */
enum {
    OPC_MXU_S32STD   = 0x00,
    OPC_MXU_S32STDR  = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL06. */
enum {
    OPC_MXU_S32LDDV  = 0x00,
    OPC_MXU_S32LDDVR = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL07. */
enum {
    OPC_MXU_S32STDV  = 0x00,
    OPC_MXU_S32STDVR = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL08. */
enum {
    OPC_MXU_S32LDI   = 0x00,
    OPC_MXU_S32LDIR  = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL09. */
enum {
    OPC_MXU_S32SDI   = 0x00,
    OPC_MXU_S32SDIR  = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL10. */
enum {
    OPC_MXU_S32LDIV  = 0x00,
    OPC_MXU_S32LDIVR = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL11. */
enum {
    OPC_MXU_S32SDIV  = 0x00,
    OPC_MXU_S32SDIVR = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL12. */
enum {
    OPC_MXU_D32ACC   = 0x00,
    OPC_MXU_D32ACCM  = 0x01,
    OPC_MXU_D32ASUM  = 0x02,
};
/* Minor opcodes within OPC_MXU__POOL13. */
enum {
    OPC_MXU_Q16ACC   = 0x00,
    OPC_MXU_Q16ACCM  = 0x01,
    OPC_MXU_Q16ASUM  = 0x02,
};
/* Minor opcodes within OPC_MXU__POOL14. */
enum {
    OPC_MXU_Q8ADDE   = 0x00,
    OPC_MXU_D8SUM    = 0x01,
    OPC_MXU_D8SUMC   = 0x02,
};
/* Minor opcodes within OPC_MXU__POOL15. */
enum {
    OPC_MXU_S32MUL   = 0x00,
    OPC_MXU_S32MULU  = 0x01,
    OPC_MXU_S32EXTR  = 0x02,
    OPC_MXU_S32EXTRV = 0x03,
};
/* Minor opcodes within OPC_MXU__POOL16. */
enum {
    OPC_MXU_D32SARW  = 0x00,
    OPC_MXU_S32ALN   = 0x01,
    OPC_MXU_S32ALNI  = 0x02,
    OPC_MXU_S32LUI   = 0x03,
    OPC_MXU_S32NOR   = 0x04,
    OPC_MXU_S32AND   = 0x05,
    OPC_MXU_S32OR    = 0x06,
    OPC_MXU_S32XOR   = 0x07,
};
/*
 * Minor opcodes within OPC_MXU__POOL17.
 * NOTE(review): the LXB/LXH/LXW members were lost in this copy; their
 * values are restored from the decoding-tree comment above
 * (000/001/011) — confirm against the MXU manual.
 */
enum {
    OPC_MXU_LXB      = 0x00,
    OPC_MXU_LXH      = 0x01,
    OPC_MXU_LXW      = 0x03,
    OPC_MXU_LXBU     = 0x04,
    OPC_MXU_LXHU     = 0x05,
};
/* Minor opcodes within OPC_MXU__POOL18. */
enum {
    OPC_MXU_D32SLLV  = 0x00,
    OPC_MXU_D32SLRV  = 0x01,
    OPC_MXU_D32SARV  = 0x03,
    OPC_MXU_Q16SLLV  = 0x04,
    OPC_MXU_Q16SLRV  = 0x05,
    OPC_MXU_Q16SARV  = 0x07,
};
/* Minor opcodes within OPC_MXU__POOL19. */
enum {
    OPC_MXU_Q8MUL    = 0x00,
    OPC_MXU_Q8MULSU  = 0x01,
};
/* Minor opcodes within OPC_MXU__POOL20. */
enum {
    OPC_MXU_Q8MOVZ   = 0x00,
    OPC_MXU_Q8MOVN   = 0x01,
    OPC_MXU_D16MOVZ  = 0x02,
    OPC_MXU_D16MOVN  = 0x03,
    OPC_MXU_S32MOVZ  = 0x04,
    OPC_MXU_S32MOVN  = 0x05,
};
/* Minor opcodes within OPC_MXU__POOL21. */
enum {
    OPC_MXU_Q8MAC    = 0x00,
    OPC_MXU_Q8MACSU  = 0x01,
};
2015 * Overview of the TX79-specific instruction set
2016 * =============================================
2018 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
2019 * are only used by the specific quadword (128-bit) LQ/SQ load/store
2020 * instructions and certain multimedia instructions (MMIs). These MMIs
2021 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
2022 * or sixteen 8-bit paths.
2026 * The Toshiba TX System RISC TX79 Core Architecture manual,
2027 * https://wiki.qemu.org/File:C790.pdf
2029 * Three-Operand Multiply and Multiply-Add (4 instructions)
2030 * --------------------------------------------------------
2031 * MADD [rd,] rs, rt Multiply/Add
2032 * MADDU [rd,] rs, rt Multiply/Add Unsigned
2033 * MULT [rd,] rs, rt Multiply (3-operand)
2034 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
2036 * Multiply Instructions for Pipeline 1 (10 instructions)
2037 * ------------------------------------------------------
2038 * MULT1 [rd,] rs, rt Multiply Pipeline 1
2039 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
2040 * DIV1 rs, rt Divide Pipeline 1
2041 * DIVU1 rs, rt Divide Unsigned Pipeline 1
2042 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
2043 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
2044 * MFHI1 rd Move From HI1 Register
2045 * MFLO1 rd Move From LO1 Register
2046 * MTHI1 rs Move To HI1 Register
2047 * MTLO1 rs Move To LO1 Register
2049 * Arithmetic (19 instructions)
2050 * ----------------------------
2051 * PADDB rd, rs, rt Parallel Add Byte
2052 * PSUBB rd, rs, rt Parallel Subtract Byte
2053 * PADDH rd, rs, rt Parallel Add Halfword
2054 * PSUBH rd, rs, rt Parallel Subtract Halfword
2055 * PADDW rd, rs, rt Parallel Add Word
2056 * PSUBW rd, rs, rt Parallel Subtract Word
2057 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
2058 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
2059 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
2060 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
2061 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
2062 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
2063 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
2064 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
2065 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
2066 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
2067 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
2068 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
2069 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
2071 * Min/Max (4 instructions)
2072 * ------------------------
2073 * PMAXH rd, rs, rt Parallel Maximum Halfword
2074 * PMINH rd, rs, rt Parallel Minimum Halfword
2075 * PMAXW rd, rs, rt Parallel Maximum Word
2076 * PMINW rd, rs, rt Parallel Minimum Word
2078 * Absolute (2 instructions)
2079 * -------------------------
2080 * PABSH rd, rt Parallel Absolute Halfword
2081 * PABSW rd, rt Parallel Absolute Word
2083 * Logical (4 instructions)
2084 * ------------------------
2085 * PAND rd, rs, rt Parallel AND
2086 * POR rd, rs, rt Parallel OR
2087 * PXOR rd, rs, rt Parallel XOR
2088 * PNOR rd, rs, rt Parallel NOR
2090 * Shift (9 instructions)
2091 * ----------------------
2092 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2093 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2094 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2095 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2096 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2097 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2098 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2099 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2100 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2102 * Compare (6 instructions)
2103 * ------------------------
2104 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2105 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2106 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2107 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2108 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2109 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2111 * LZC (1 instruction)
2112 * -------------------
2113 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2115 * Quadword Load and Store (2 instructions)
2116 * ----------------------------------------
2117 * LQ rt, offset(base) Load Quadword
2118 * SQ rt, offset(base) Store Quadword
2120 * Multiply and Divide (19 instructions)
2121 * -------------------------------------
2122 * PMULTW rd, rs, rt Parallel Multiply Word
2123 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2124 * PDIVW rs, rt Parallel Divide Word
2125 * PDIVUW rs, rt Parallel Divide Unsigned Word
2126 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2127 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2128 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2129 * PMULTH rd, rs, rt Parallel Multiply Halfword
2130 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2131 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2132 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2133 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2134 * PDIVBW rs, rt Parallel Divide Broadcast Word
2135 * PMFHI rd Parallel Move From HI Register
2136 * PMFLO rd Parallel Move From LO Register
2137 * PMTHI rs Parallel Move To HI Register
2138 * PMTLO rs Parallel Move To LO Register
2139 * PMFHL rd Parallel Move From HI/LO Register
2140 * PMTHL rs Parallel Move To HI/LO Register
2142 * Pack/Extend (11 instructions)
2143 * -----------------------------
2144 * PPAC5 rd, rt Parallel Pack to 5 bits
2145 * PPACB rd, rs, rt Parallel Pack to Byte
2146 * PPACH rd, rs, rt Parallel Pack to Halfword
2147 * PPACW rd, rs, rt Parallel Pack to Word
2148 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2149 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2150 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2151 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2152 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2153 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2154 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2156 * Others (16 instructions)
2157 * ------------------------
2158 * PCPYH rd, rt Parallel Copy Halfword
2159 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2160 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2161 * PREVH rd, rt Parallel Reverse Halfword
2162 * PINTH rd, rs, rt Parallel Interleave Halfword
2163 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2164 * PEXEH rd, rt Parallel Exchange Even Halfword
2165 * PEXCH rd, rt Parallel Exchange Center Halfword
2166 * PEXEW rd, rt Parallel Exchange Even Word
2167 * PEXCW rd, rt Parallel Exchange Center Word
2168 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2169 * MFSA rd Move from Shift Amount Register
2170 * MTSA rs Move to Shift Amount Register
2171 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2172 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2173 * PROT3W rd, rt Parallel Rotate 3 Words
2175 * MMI (MultiMedia Instruction) encodings
2176 * ======================================
2178 * MMI instructions encoding table keys:
2180 * * This code is reserved for future use. An attempt to execute it
2181 * causes a Reserved Instruction exception.
2182 * % This code indicates an instruction class. The instruction word
2183 * must be further decoded by examining additional tables that show
2184 * the values for other instruction fields.
2185 * # This code is reserved for the unsupported instructions DMULT,
2186 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2187 * to execute it causes a Reserved Instruction exception.
2189 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
2192 * +--------+----------------------------------------+
2194 * +--------+----------------------------------------+
2196 * opcode bits 28..26
2197 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2198 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2199 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2200 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2201 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2202 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2203 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2204 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2205 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2206 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2207 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
/* TX79 MMI major opcodes (bits 31..26), sharing encodings with MIPS32. */
enum {
    MMI_OPC_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    MMI_OPC_LQ        = 0x1E << 26,    /* Same as OPC_MSA */
    MMI_OPC_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};
2217 * MMI instructions with opcode field = MMI:
2220 * +--------+-------------------------------+--------+
2221 * | MMI | |function|
2222 * +--------+-------------------------------+--------+
2224 * function bits 2..0
2225 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2226 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2227 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2228 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2229 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2230 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2231 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2232 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2233 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2234 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2235 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
/* Extract major opcode plus the 6-bit function field of an MMI insn. */
#define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2240 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
2241 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
2242 MMI_OPC_PLZCW
= 0x04 | MMI_OPC_CLASS_MMI
,
2243 MMI_OPC_CLASS_MMI0
= 0x08 | MMI_OPC_CLASS_MMI
,
2244 MMI_OPC_CLASS_MMI2
= 0x09 | MMI_OPC_CLASS_MMI
,
2245 MMI_OPC_MFHI1
= 0x10 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFHI */
2246 MMI_OPC_MTHI1
= 0x11 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTHI */
2247 MMI_OPC_MFLO1
= 0x12 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFLO */
2248 MMI_OPC_MTLO1
= 0x13 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTLO */
2249 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
2250 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
2251 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
2252 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
2253 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
2254 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
2255 MMI_OPC_CLASS_MMI1
= 0x28 | MMI_OPC_CLASS_MMI
,
2256 MMI_OPC_CLASS_MMI3
= 0x29 | MMI_OPC_CLASS_MMI
,
2257 MMI_OPC_PMFHL
= 0x30 | MMI_OPC_CLASS_MMI
,
2258 MMI_OPC_PMTHL
= 0x31 | MMI_OPC_CLASS_MMI
,
2259 MMI_OPC_PSLLH
= 0x34 | MMI_OPC_CLASS_MMI
,
2260 MMI_OPC_PSRLH
= 0x36 | MMI_OPC_CLASS_MMI
,
2261 MMI_OPC_PSRAH
= 0x37 | MMI_OPC_CLASS_MMI
,
2262 MMI_OPC_PSLLW
= 0x3C | MMI_OPC_CLASS_MMI
,
2263 MMI_OPC_PSRLW
= 0x3E | MMI_OPC_CLASS_MMI
,
2264 MMI_OPC_PSRAW
= 0x3F | MMI_OPC_CLASS_MMI
,
2268 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
2271 * +--------+----------------------+--------+--------+
2272 * | MMI | |function| MMI0 |
2273 * +--------+----------------------+--------+--------+
2275 * function bits 7..6
2276 * bits | 0 | 1 | 2 | 3
2277 * 10..8 | 00 | 01 | 10 | 11
2278 * -------+-------+-------+-------+-------
2279 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2280 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2281 * 2 010 | PADDB | PSUBB | PCGTB | *
2282 * 3 011 | * | * | * | *
2283 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2284 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2285 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2286 * 7 111 | * | * | PEXT5 | PPAC5
/* Extract major opcode plus the 11 low bits (function + MMI0 fields). */
#define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2291 MMI_OPC_0_PADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI0
,
2292 MMI_OPC_0_PSUBW
= (0x01 << 6) | MMI_OPC_CLASS_MMI0
,
2293 MMI_OPC_0_PCGTW
= (0x02 << 6) | MMI_OPC_CLASS_MMI0
,
2294 MMI_OPC_0_PMAXW
= (0x03 << 6) | MMI_OPC_CLASS_MMI0
,
2295 MMI_OPC_0_PADDH
= (0x04 << 6) | MMI_OPC_CLASS_MMI0
,
2296 MMI_OPC_0_PSUBH
= (0x05 << 6) | MMI_OPC_CLASS_MMI0
,
2297 MMI_OPC_0_PCGTH
= (0x06 << 6) | MMI_OPC_CLASS_MMI0
,
2298 MMI_OPC_0_PMAXH
= (0x07 << 6) | MMI_OPC_CLASS_MMI0
,
2299 MMI_OPC_0_PADDB
= (0x08 << 6) | MMI_OPC_CLASS_MMI0
,
2300 MMI_OPC_0_PSUBB
= (0x09 << 6) | MMI_OPC_CLASS_MMI0
,
2301 MMI_OPC_0_PCGTB
= (0x0A << 6) | MMI_OPC_CLASS_MMI0
,
2302 MMI_OPC_0_PADDSW
= (0x10 << 6) | MMI_OPC_CLASS_MMI0
,
2303 MMI_OPC_0_PSUBSW
= (0x11 << 6) | MMI_OPC_CLASS_MMI0
,
2304 MMI_OPC_0_PEXTLW
= (0x12 << 6) | MMI_OPC_CLASS_MMI0
,
2305 MMI_OPC_0_PPACW
= (0x13 << 6) | MMI_OPC_CLASS_MMI0
,
2306 MMI_OPC_0_PADDSH
= (0x14 << 6) | MMI_OPC_CLASS_MMI0
,
2307 MMI_OPC_0_PSUBSH
= (0x15 << 6) | MMI_OPC_CLASS_MMI0
,
2308 MMI_OPC_0_PEXTLH
= (0x16 << 6) | MMI_OPC_CLASS_MMI0
,
2309 MMI_OPC_0_PPACH
= (0x17 << 6) | MMI_OPC_CLASS_MMI0
,
2310 MMI_OPC_0_PADDSB
= (0x18 << 6) | MMI_OPC_CLASS_MMI0
,
2311 MMI_OPC_0_PSUBSB
= (0x19 << 6) | MMI_OPC_CLASS_MMI0
,
2312 MMI_OPC_0_PEXTLB
= (0x1A << 6) | MMI_OPC_CLASS_MMI0
,
2313 MMI_OPC_0_PPACB
= (0x1B << 6) | MMI_OPC_CLASS_MMI0
,
2314 MMI_OPC_0_PEXT5
= (0x1E << 6) | MMI_OPC_CLASS_MMI0
,
2315 MMI_OPC_0_PPAC5
= (0x1F << 6) | MMI_OPC_CLASS_MMI0
,
2319 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
2322 * +--------+----------------------+--------+--------+
2323 * | MMI | |function| MMI1 |
2324 * +--------+----------------------+--------+--------+
2326 * function bits 7..6
2327 * bits | 0 | 1 | 2 | 3
2328 * 10..8 | 00 | 01 | 10 | 11
2329 * -------+-------+-------+-------+-------
2330 * 0 000 | * | PABSW | PCEQW | PMINW
2331 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2332 * 2 010 | * | * | PCEQB | *
2333 * 3 011 | * | * | * | *
2334 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2335 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2336 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2337 * 7 111 | * | * | * | *
/* Extract major opcode plus the 11 low bits (function + MMI1 fields). */
#define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2342 MMI_OPC_1_PABSW
= (0x01 << 6) | MMI_OPC_CLASS_MMI1
,
2343 MMI_OPC_1_PCEQW
= (0x02 << 6) | MMI_OPC_CLASS_MMI1
,
2344 MMI_OPC_1_PMINW
= (0x03 << 6) | MMI_OPC_CLASS_MMI1
,
2345 MMI_OPC_1_PADSBH
= (0x04 << 6) | MMI_OPC_CLASS_MMI1
,
2346 MMI_OPC_1_PABSH
= (0x05 << 6) | MMI_OPC_CLASS_MMI1
,
2347 MMI_OPC_1_PCEQH
= (0x06 << 6) | MMI_OPC_CLASS_MMI1
,
2348 MMI_OPC_1_PMINH
= (0x07 << 6) | MMI_OPC_CLASS_MMI1
,
2349 MMI_OPC_1_PCEQB
= (0x0A << 6) | MMI_OPC_CLASS_MMI1
,
2350 MMI_OPC_1_PADDUW
= (0x10 << 6) | MMI_OPC_CLASS_MMI1
,
2351 MMI_OPC_1_PSUBUW
= (0x11 << 6) | MMI_OPC_CLASS_MMI1
,
2352 MMI_OPC_1_PEXTUW
= (0x12 << 6) | MMI_OPC_CLASS_MMI1
,
2353 MMI_OPC_1_PADDUH
= (0x14 << 6) | MMI_OPC_CLASS_MMI1
,
2354 MMI_OPC_1_PSUBUH
= (0x15 << 6) | MMI_OPC_CLASS_MMI1
,
2355 MMI_OPC_1_PEXTUH
= (0x16 << 6) | MMI_OPC_CLASS_MMI1
,
2356 MMI_OPC_1_PADDUB
= (0x18 << 6) | MMI_OPC_CLASS_MMI1
,
2357 MMI_OPC_1_PSUBUB
= (0x19 << 6) | MMI_OPC_CLASS_MMI1
,
2358 MMI_OPC_1_PEXTUB
= (0x1A << 6) | MMI_OPC_CLASS_MMI1
,
2359 MMI_OPC_1_QFSRV
= (0x1B << 6) | MMI_OPC_CLASS_MMI1
,
2363 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
2366 * +--------+----------------------+--------+--------+
2367 * | MMI | |function| MMI2 |
2368 * +--------+----------------------+--------+--------+
2370 * function bits 7..6
2371 * bits | 0 | 1 | 2 | 3
2372 * 10..8 | 00 | 01 | 10 | 11
2373 * -------+-------+-------+-------+-------
2374 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2375 * 1 001 | PMSUBW| * | * | *
2376 * 2 010 | PMFHI | PMFLO | PINTH | *
2377 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2378 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2379 * 5 101 | PMSUBH| PHMSBH| * | *
2380 * 6 110 | * | * | PEXEH | PREVH
2381 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
/* Extract major opcode plus the 11 low bits (function + MMI2 fields). */
#define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2386 MMI_OPC_2_PMADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI2
,
2387 MMI_OPC_2_PSLLVW
= (0x02 << 6) | MMI_OPC_CLASS_MMI2
,
2388 MMI_OPC_2_PSRLVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI2
,
2389 MMI_OPC_2_PMSUBW
= (0x04 << 6) | MMI_OPC_CLASS_MMI2
,
2390 MMI_OPC_2_PMFHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI2
,
2391 MMI_OPC_2_PMFLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI2
,
2392 MMI_OPC_2_PINTH
= (0x0A << 6) | MMI_OPC_CLASS_MMI2
,
2393 MMI_OPC_2_PMULTW
= (0x0C << 6) | MMI_OPC_CLASS_MMI2
,
2394 MMI_OPC_2_PDIVW
= (0x0D << 6) | MMI_OPC_CLASS_MMI2
,
2395 MMI_OPC_2_PCPYLD
= (0x0E << 6) | MMI_OPC_CLASS_MMI2
,
2396 MMI_OPC_2_PMADDH
= (0x10 << 6) | MMI_OPC_CLASS_MMI2
,
2397 MMI_OPC_2_PHMADH
= (0x11 << 6) | MMI_OPC_CLASS_MMI2
,
2398 MMI_OPC_2_PAND
= (0x12 << 6) | MMI_OPC_CLASS_MMI2
,
2399 MMI_OPC_2_PXOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI2
,
2400 MMI_OPC_2_PMSUBH
= (0x14 << 6) | MMI_OPC_CLASS_MMI2
,
2401 MMI_OPC_2_PHMSBH
= (0x15 << 6) | MMI_OPC_CLASS_MMI2
,
2402 MMI_OPC_2_PEXEH
= (0x1A << 6) | MMI_OPC_CLASS_MMI2
,
2403 MMI_OPC_2_PREVH
= (0x1B << 6) | MMI_OPC_CLASS_MMI2
,
2404 MMI_OPC_2_PMULTH
= (0x1C << 6) | MMI_OPC_CLASS_MMI2
,
2405 MMI_OPC_2_PDIVBW
= (0x1D << 6) | MMI_OPC_CLASS_MMI2
,
2406 MMI_OPC_2_PEXEW
= (0x1E << 6) | MMI_OPC_CLASS_MMI2
,
2407 MMI_OPC_2_PROT3W
= (0x1F << 6) | MMI_OPC_CLASS_MMI2
,
2411 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
2414 * +--------+----------------------+--------+--------+
2415 * | MMI | |function| MMI3 |
2416 * +--------+----------------------+--------+--------+
2418 * function bits 7..6
2419 * bits | 0 | 1 | 2 | 3
2420 * 10..8 | 00 | 01 | 10 | 11
2421 * -------+-------+-------+-------+-------
2422 * 0 000 |PMADDUW| * | * | PSRAVW
2423 * 1 001 | * | * | * | *
2424 * 2 010 | PMTHI | PMTLO | PINTEH| *
2425 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2426 * 4 100 | * | * | POR | PNOR
2427 * 5 101 | * | * | * | *
2428 * 6 110 | * | * | PEXCH | PCPYH
2429 * 7 111 | * | * | PEXCW | *
/* Extract major opcode plus the 11 low bits (function + MMI3 fields). */
#define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2434 MMI_OPC_3_PMADDUW
= (0x00 << 6) | MMI_OPC_CLASS_MMI3
,
2435 MMI_OPC_3_PSRAVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI3
,
2436 MMI_OPC_3_PMTHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI3
,
2437 MMI_OPC_3_PMTLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI3
,
2438 MMI_OPC_3_PINTEH
= (0x0A << 6) | MMI_OPC_CLASS_MMI3
,
2439 MMI_OPC_3_PMULTUW
= (0x0C << 6) | MMI_OPC_CLASS_MMI3
,
2440 MMI_OPC_3_PDIVUW
= (0x0D << 6) | MMI_OPC_CLASS_MMI3
,
2441 MMI_OPC_3_PCPYUD
= (0x0E << 6) | MMI_OPC_CLASS_MMI3
,
2442 MMI_OPC_3_POR
= (0x12 << 6) | MMI_OPC_CLASS_MMI3
,
2443 MMI_OPC_3_PNOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI3
,
2444 MMI_OPC_3_PEXCH
= (0x1A << 6) | MMI_OPC_CLASS_MMI3
,
2445 MMI_OPC_3_PCPYH
= (0x1B << 6) | MMI_OPC_CLASS_MMI3
,
2446 MMI_OPC_3_PEXCW
= (0x1E << 6) | MMI_OPC_CLASS_MMI3
,
2449 /* global register indices */
2450 static TCGv cpu_gpr
[32], cpu_PC
;
2451 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2452 static TCGv cpu_dspctrl
, btarget
, bcond
;
2453 static TCGv_i32 hflags
;
2454 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2455 static TCGv_i64 fpu_f64
[32];
2456 static TCGv_i64 msa_wr_d
[64];
2458 #if !defined(TARGET_MIPS64)
2460 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2464 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers.  Each materializes its trailing integer argument
 * as a TCGv_i32 constant, calls the named helper with cpu_env prepended,
 * and frees the temporary.  The name encodes the shape: <n>e<m>i means
 * n results, m TCGv arguments (after env) and one trailing immediate.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
2508 typedef struct DisasContext
{
2509 DisasContextBase base
;
2510 target_ulong saved_pc
;
2511 target_ulong page_start
;
2513 uint64_t insn_flags
;
2514 int32_t CP0_Config1
;
2515 int32_t CP0_Config2
;
2516 int32_t CP0_Config3
;
2517 int32_t CP0_Config5
;
2518 /* Routine used to access memory */
2520 TCGMemOp default_tcg_memop_mask
;
2521 uint32_t hflags
, saved_hflags
;
2522 target_ulong btarget
;
2533 int CP0_LLAddr_shift
;
/* Target-specific is_jmp codes on top of the generic DISAS_TARGET_* set. */
#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
/* ABI names of the 32 general-purpose registers, for disassembly/logging. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI registers (one per DSP accumulator). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO registers (one per DSP accumulator). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating-point registers. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names of the MSA vector registers, one entry per 64-bit half (w<n>.d<k>). */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#if !defined(TARGET_MIPS64)
/* Names of the MXU registers XR1..XR15 and the MXU control register. */
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
#endif
/* Emit a disassembly trace line when MIPS_DEBUG_DISAS is enabled. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)
/* Log an invalid-instruction diagnostic (opcode fields broken out). */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->base.pc_next, ctx->opcode, op,                 \
                          ctx->opcode >> 26, ctx->opcode & 0x3F,              \
                          ((ctx->opcode >> 16) & 0x1F));                      \
        }                                                                     \
    } while (0)
2611 /* General purpose registers moves. */
2612 static inline void gen_load_gpr (TCGv t
, int reg
)
2615 tcg_gen_movi_tl(t
, 0);
2617 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2620 static inline void gen_store_gpr (TCGv t
, int reg
)
2623 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2626 /* Moves to/from shadow registers. */
2627 static inline void gen_load_srsgpr (int from
, int to
)
2629 TCGv t0
= tcg_temp_new();
2632 tcg_gen_movi_tl(t0
, 0);
2634 TCGv_i32 t2
= tcg_temp_new_i32();
2635 TCGv_ptr addr
= tcg_temp_new_ptr();
2637 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2638 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2639 tcg_gen_andi_i32(t2
, t2
, 0xf);
2640 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2641 tcg_gen_ext_i32_ptr(addr
, t2
);
2642 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2644 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2645 tcg_temp_free_ptr(addr
);
2646 tcg_temp_free_i32(t2
);
2648 gen_store_gpr(t0
, to
);
2652 static inline void gen_store_srsgpr (int from
, int to
)
2655 TCGv t0
= tcg_temp_new();
2656 TCGv_i32 t2
= tcg_temp_new_i32();
2657 TCGv_ptr addr
= tcg_temp_new_ptr();
2659 gen_load_gpr(t0
, from
);
2660 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2661 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2662 tcg_gen_andi_i32(t2
, t2
, 0xf);
2663 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2664 tcg_gen_ext_i32_ptr(addr
, t2
);
2665 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2667 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2668 tcg_temp_free_ptr(addr
);
2669 tcg_temp_free_i32(t2
);
2674 #if !defined(TARGET_MIPS64)
2675 /* MXU General purpose registers moves. */
2676 static inline void gen_load_mxu_gpr(TCGv t
, unsigned int reg
)
2679 tcg_gen_movi_tl(t
, 0);
2680 } else if (reg
<= 15) {
2681 tcg_gen_mov_tl(t
, mxu_gpr
[reg
- 1]);
2685 static inline void gen_store_mxu_gpr(TCGv t
, unsigned int reg
)
2687 if (reg
> 0 && reg
<= 15) {
2688 tcg_gen_mov_tl(mxu_gpr
[reg
- 1], t
);
2692 /* MXU control register moves. */
2693 static inline void gen_load_mxu_cr(TCGv t
)
2695 tcg_gen_mov_tl(t
, mxu_CR
);
2698 static inline void gen_store_mxu_cr(TCGv t
)
2700 /* TODO: Add handling of RW rules for MXU_CR. */
2701 tcg_gen_mov_tl(mxu_CR
, t
);
2707 static inline void gen_save_pc(target_ulong pc
)
2709 tcg_gen_movi_tl(cpu_PC
, pc
);
2712 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2714 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2715 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2716 gen_save_pc(ctx
->base
.pc_next
);
2717 ctx
->saved_pc
= ctx
->base
.pc_next
;
2719 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2720 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2721 ctx
->saved_hflags
= ctx
->hflags
;
2722 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2728 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2734 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2736 ctx
->saved_hflags
= ctx
->hflags
;
2737 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2743 ctx
->btarget
= env
->btarget
;
2748 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2750 TCGv_i32 texcp
= tcg_const_i32(excp
);
2751 TCGv_i32 terr
= tcg_const_i32(err
);
2752 save_cpu_state(ctx
, 1);
2753 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2754 tcg_temp_free_i32(terr
);
2755 tcg_temp_free_i32(texcp
);
2756 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2759 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2761 gen_helper_0e0i(raise_exception
, excp
);
2764 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2766 generate_exception_err(ctx
, excp
, 0);
2769 /* Floating point register moves. */
2770 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2772 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2773 generate_exception(ctx
, EXCP_RI
);
2775 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2778 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2781 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2782 generate_exception(ctx
, EXCP_RI
);
2784 t64
= tcg_temp_new_i64();
2785 tcg_gen_extu_i32_i64(t64
, t
);
2786 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2787 tcg_temp_free_i64(t64
);
2790 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2792 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2793 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2795 gen_load_fpr32(ctx
, t
, reg
| 1);
2799 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2801 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2802 TCGv_i64 t64
= tcg_temp_new_i64();
2803 tcg_gen_extu_i32_i64(t64
, t
);
2804 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2805 tcg_temp_free_i64(t64
);
2807 gen_store_fpr32(ctx
, t
, reg
| 1);
2811 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2813 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2814 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2816 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2820 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2822 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2823 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2826 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2827 t0
= tcg_temp_new_i64();
2828 tcg_gen_shri_i64(t0
, t
, 32);
2829 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2830 tcg_temp_free_i64(t0
);
/*
 * Map FP condition code number 'cc' to its FCSR bit position:
 * cc 0 lives in bit 23, cc 1..7 live in bits 25..31 (bit 24 is FS).
 */
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
2842 /* Addresses computation */
2843 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
2845 tcg_gen_add_tl(ret
, arg0
, arg1
);
2847 #if defined(TARGET_MIPS64)
2848 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2849 tcg_gen_ext32s_i64(ret
, ret
);
2854 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2857 tcg_gen_addi_tl(ret
, base
, ofs
);
2859 #if defined(TARGET_MIPS64)
2860 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2861 tcg_gen_ext32s_i64(ret
, ret
);
2866 /* Addresses computation (translation time) */
2867 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2870 target_long sum
= base
+ offset
;
2872 #if defined(TARGET_MIPS64)
2873 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2880 /* Sign-extract the low 32-bits to a target_long. */
2881 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2883 #if defined(TARGET_MIPS64)
2884 tcg_gen_ext32s_i64(ret
, arg
);
2886 tcg_gen_extrl_i64_i32(ret
, arg
);
2890 /* Sign-extract the high 32-bits to a target_long. */
2891 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2893 #if defined(TARGET_MIPS64)
2894 tcg_gen_sari_i64(ret
, arg
, 32);
2896 tcg_gen_extrh_i64_i32(ret
, arg
);
2900 static inline void check_cp0_enabled(DisasContext
*ctx
)
2902 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
2903 generate_exception_err(ctx
, EXCP_CpU
, 0);
2906 static inline void check_cp1_enabled(DisasContext
*ctx
)
2908 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
2909 generate_exception_err(ctx
, EXCP_CpU
, 1);
2912 /* Verify that the processor is running with COP1X instructions enabled.
2913 This is associated with the nabla symbol in the MIPS32 and MIPS64
2916 static inline void check_cop1x(DisasContext
*ctx
)
2918 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
2919 generate_exception_end(ctx
, EXCP_RI
);
2922 /* Verify that the processor is running with 64-bit floating-point
2923 operations enabled. */
2925 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2927 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
2928 generate_exception_end(ctx
, EXCP_RI
);
2932 * Verify if floating point register is valid; an operation is not defined
2933 * if bit 0 of any register specification is set and the FR bit in the
2934 * Status register equals zero, since the register numbers specify an
2935 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2936 * in the Status register equals one, both even and odd register numbers
2937 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2939 * Multiple 64 bit wide registers can be checked by calling
2940 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2942 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2944 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
2945 generate_exception_end(ctx
, EXCP_RI
);
2948 /* Verify that the processor is running with DSP instructions enabled.
2949 This is enabled by CP0 Status register MX(24) bit.
2952 static inline void check_dsp(DisasContext
*ctx
)
2954 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2955 if (ctx
->insn_flags
& ASE_DSP
) {
2956 generate_exception_end(ctx
, EXCP_DSPDIS
);
2958 generate_exception_end(ctx
, EXCP_RI
);
2963 static inline void check_dsp_r2(DisasContext
*ctx
)
2965 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2966 if (ctx
->insn_flags
& ASE_DSP
) {
2967 generate_exception_end(ctx
, EXCP_DSPDIS
);
2969 generate_exception_end(ctx
, EXCP_RI
);
2974 static inline void check_dsp_r3(DisasContext
*ctx
)
2976 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2977 if (ctx
->insn_flags
& ASE_DSP
) {
2978 generate_exception_end(ctx
, EXCP_DSPDIS
);
2980 generate_exception_end(ctx
, EXCP_RI
);
2985 /* This code generates a "reserved instruction" exception if the
2986 CPU does not support the instruction set corresponding to flags. */
2987 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
2989 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2990 generate_exception_end(ctx
, EXCP_RI
);
2994 /* This code generates a "reserved instruction" exception if the
2995 CPU has corresponding flag set which indicates that the instruction
2996 has been removed. */
2997 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2999 if (unlikely(ctx
->insn_flags
& flags
)) {
3000 generate_exception_end(ctx
, EXCP_RI
);
3005 * The Linux kernel traps certain reserved instruction exceptions to
3006 * emulate the corresponding instructions. QEMU is the kernel in user
3007 * mode, so those traps are emulated by accepting the instructions.
3009 * A reserved instruction exception is generated for flagged CPUs if
3010 * QEMU runs in system mode.
3012 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
3014 #ifndef CONFIG_USER_ONLY
3015 check_insn_opc_removed(ctx
, flags
);
3019 /* This code generates a "reserved instruction" exception if the
3020 CPU does not support 64-bit paired-single (PS) floating point data type */
3021 static inline void check_ps(DisasContext
*ctx
)
3023 if (unlikely(!ctx
->ps
)) {
3024 generate_exception(ctx
, EXCP_RI
);
3026 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif
3039 #ifndef CONFIG_USER_ONLY
3040 static inline void check_mvh(DisasContext
*ctx
)
3042 if (unlikely(!ctx
->mvh
)) {
3043 generate_exception(ctx
, EXCP_RI
);
3049 * This code generates a "reserved instruction" exception if the
3050 * Config5 XNP bit is set.
3052 static inline void check_xnp(DisasContext
*ctx
)
3054 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
3055 generate_exception_end(ctx
, EXCP_RI
);
3059 #ifndef CONFIG_USER_ONLY
3061 * This code generates a "reserved instruction" exception if the
3062 * Config3 PW bit is NOT set.
3064 static inline void check_pw(DisasContext
*ctx
)
3066 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
3067 generate_exception_end(ctx
, EXCP_RI
);
3073 * This code generates a "reserved instruction" exception if the
3074 * Config3 MT bit is NOT set.
3076 static inline void check_mt(DisasContext
*ctx
)
3078 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3079 generate_exception_end(ctx
, EXCP_RI
);
3083 #ifndef CONFIG_USER_ONLY
3085 * This code generates a "coprocessor unusable" exception if CP0 is not
3086 * available, and, if that is not the case, generates a "reserved instruction"
3087 * exception if the Config5 MT bit is NOT set. This is needed for availability
3088 * control of some of MT ASE instructions.
3090 static inline void check_cp0_mt(DisasContext
*ctx
)
3092 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
3093 generate_exception_err(ctx
, EXCP_CpU
, 0);
3095 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3096 generate_exception_err(ctx
, EXCP_RI
, 0);
3103 * This code generates a "reserved instruction" exception if the
3104 * Config5 NMS bit is set.
3106 static inline void check_nms(DisasContext
*ctx
)
3108 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
3109 generate_exception_end(ctx
, EXCP_RI
);
3114 * This code generates a "reserved instruction" exception if the
3115 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
3116 * Config2 TL, and Config5 L2C are unset.
3118 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
3120 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
3121 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
3122 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
3123 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
3124 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
3125 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))
3127 generate_exception_end(ctx
, EXCP_RI
);
3132 * This code generates a "reserved instruction" exception if the
3133 * Config5 EVA bit is NOT set.
3135 static inline void check_eva(DisasContext
*ctx
)
3137 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3138 generate_exception_end(ctx
, EXCP_RI
);
3143 /* Define small wrappers for gen_load_fpr* so that we have a uniform
3144 calling interface for 32 and 64-bit FPRs. No sense in changing
3145 all callers for gen_load_fpr32 when we need the CTX parameter for
3147 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3148 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3149 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3150 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3151 int ft, int fs, int cc) \
3153 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3154 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3163 check_cp1_registers(ctx, fs | ft); \
3171 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3172 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3174 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
3175 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
3176 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
3177 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
3178 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
3179 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
3180 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
3181 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
3182 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
3183 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
3184 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
3185 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
3186 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
3187 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
3188 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
3189 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
3192 tcg_temp_free_i##bits (fp0); \
3193 tcg_temp_free_i##bits (fp1); \
3196 FOP_CONDS(, 0, d
, FMT_D
, 64)
3197 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3198 FOP_CONDS(, 0, s
, FMT_S
, 32)
3199 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3200 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3201 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3204 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3205 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3206 int ft, int fs, int fd) \
3208 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3209 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3210 if (ifmt == FMT_D) { \
3211 check_cp1_registers(ctx, fs | ft | fd); \
3213 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3214 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3217 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3220 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3223 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3226 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3229 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3232 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3235 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3238 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3241 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3244 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3247 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3250 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3253 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3256 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3259 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3262 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3265 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3268 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3271 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3274 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3277 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3280 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3286 tcg_temp_free_i ## bits (fp0); \
3287 tcg_temp_free_i ## bits (fp1); \
3290 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3291 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3293 #undef gen_ldcmp_fpr32
3294 #undef gen_ldcmp_fpr64
3296 /* load/store instructions. */
3297 #ifdef CONFIG_USER_ONLY
3298 #define OP_LD_ATOMIC(insn,fname) \
3299 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3300 DisasContext *ctx) \
3302 TCGv t0 = tcg_temp_new(); \
3303 tcg_gen_mov_tl(t0, arg1); \
3304 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3305 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3306 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3307 tcg_temp_free(t0); \
3310 #define OP_LD_ATOMIC(insn,fname) \
3311 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3312 DisasContext *ctx) \
3314 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3317 OP_LD_ATOMIC(ll
,ld32s
);
3318 #if defined(TARGET_MIPS64)
3319 OP_LD_ATOMIC(lld
,ld64
);
3323 #ifdef CONFIG_USER_ONLY
3324 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3325 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3326 DisasContext *ctx) \
3328 TCGv t0 = tcg_temp_new(); \
3329 TCGLabel *l1 = gen_new_label(); \
3330 TCGLabel *l2 = gen_new_label(); \
3332 tcg_gen_andi_tl(t0, arg2, almask); \
3333 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
3334 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
3335 generate_exception(ctx, EXCP_AdES); \
3336 gen_set_label(l1); \
3337 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3338 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
3339 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
3340 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
3341 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
3342 generate_exception_end(ctx, EXCP_SC); \
3343 gen_set_label(l2); \
3344 tcg_gen_movi_tl(t0, 0); \
3345 gen_store_gpr(t0, rt); \
3346 tcg_temp_free(t0); \
3349 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
3350 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
3351 DisasContext *ctx) \
3353 TCGv t0 = tcg_temp_new(); \
3354 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
3355 gen_store_gpr(t0, rt); \
3356 tcg_temp_free(t0); \
3359 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
3360 #if defined(TARGET_MIPS64)
3361 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
3365 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
3366 int base
, int offset
)
3369 tcg_gen_movi_tl(addr
, offset
);
3370 } else if (offset
== 0) {
3371 gen_load_gpr(addr
, base
);
3373 tcg_gen_movi_tl(addr
, offset
);
3374 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3378 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
3380 target_ulong pc
= ctx
->base
.pc_next
;
3382 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3383 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3388 pc
&= ~(target_ulong
)3;
3393 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3394 int rt
, int base
, int offset
)
3397 int mem_idx
= ctx
->mem_idx
;
3399 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3400 /* Loongson CPU uses a load to zero register for prefetch.
3401 We emulate it as a NOP. On other CPU we must perform the
3402 actual memory access. */
3406 t0
= tcg_temp_new();
3407 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3410 #if defined(TARGET_MIPS64)
3412 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3413 ctx
->default_tcg_memop_mask
);
3414 gen_store_gpr(t0
, rt
);
3417 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3418 ctx
->default_tcg_memop_mask
);
3419 gen_store_gpr(t0
, rt
);
3423 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3424 gen_store_gpr(t0
, rt
);
3427 t1
= tcg_temp_new();
3428 /* Do a byte access to possibly trigger a page
3429 fault with the unaligned address. */
3430 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3431 tcg_gen_andi_tl(t1
, t0
, 7);
3432 #ifndef TARGET_WORDS_BIGENDIAN
3433 tcg_gen_xori_tl(t1
, t1
, 7);
3435 tcg_gen_shli_tl(t1
, t1
, 3);
3436 tcg_gen_andi_tl(t0
, t0
, ~7);
3437 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3438 tcg_gen_shl_tl(t0
, t0
, t1
);
3439 t2
= tcg_const_tl(-1);
3440 tcg_gen_shl_tl(t2
, t2
, t1
);
3441 gen_load_gpr(t1
, rt
);
3442 tcg_gen_andc_tl(t1
, t1
, t2
);
3444 tcg_gen_or_tl(t0
, t0
, t1
);
3446 gen_store_gpr(t0
, rt
);
3449 t1
= tcg_temp_new();
3450 /* Do a byte access to possibly trigger a page
3451 fault with the unaligned address. */
3452 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3453 tcg_gen_andi_tl(t1
, t0
, 7);
3454 #ifdef TARGET_WORDS_BIGENDIAN
3455 tcg_gen_xori_tl(t1
, t1
, 7);
3457 tcg_gen_shli_tl(t1
, t1
, 3);
3458 tcg_gen_andi_tl(t0
, t0
, ~7);
3459 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3460 tcg_gen_shr_tl(t0
, t0
, t1
);
3461 tcg_gen_xori_tl(t1
, t1
, 63);
3462 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3463 tcg_gen_shl_tl(t2
, t2
, t1
);
3464 gen_load_gpr(t1
, rt
);
3465 tcg_gen_and_tl(t1
, t1
, t2
);
3467 tcg_gen_or_tl(t0
, t0
, t1
);
3469 gen_store_gpr(t0
, rt
);
3472 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3473 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3475 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3476 gen_store_gpr(t0
, rt
);
3480 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3481 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3483 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3484 gen_store_gpr(t0
, rt
);
3487 mem_idx
= MIPS_HFLAG_UM
;
3490 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3491 ctx
->default_tcg_memop_mask
);
3492 gen_store_gpr(t0
, rt
);
3495 mem_idx
= MIPS_HFLAG_UM
;
3498 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3499 ctx
->default_tcg_memop_mask
);
3500 gen_store_gpr(t0
, rt
);
3503 mem_idx
= MIPS_HFLAG_UM
;
3506 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3507 ctx
->default_tcg_memop_mask
);
3508 gen_store_gpr(t0
, rt
);
3511 mem_idx
= MIPS_HFLAG_UM
;
3514 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3515 gen_store_gpr(t0
, rt
);
3518 mem_idx
= MIPS_HFLAG_UM
;
3521 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3522 gen_store_gpr(t0
, rt
);
3525 mem_idx
= MIPS_HFLAG_UM
;
3528 t1
= tcg_temp_new();
3529 /* Do a byte access to possibly trigger a page
3530 fault with the unaligned address. */
3531 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3532 tcg_gen_andi_tl(t1
, t0
, 3);
3533 #ifndef TARGET_WORDS_BIGENDIAN
3534 tcg_gen_xori_tl(t1
, t1
, 3);
3536 tcg_gen_shli_tl(t1
, t1
, 3);
3537 tcg_gen_andi_tl(t0
, t0
, ~3);
3538 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3539 tcg_gen_shl_tl(t0
, t0
, t1
);
3540 t2
= tcg_const_tl(-1);
3541 tcg_gen_shl_tl(t2
, t2
, t1
);
3542 gen_load_gpr(t1
, rt
);
3543 tcg_gen_andc_tl(t1
, t1
, t2
);
3545 tcg_gen_or_tl(t0
, t0
, t1
);
3547 tcg_gen_ext32s_tl(t0
, t0
);
3548 gen_store_gpr(t0
, rt
);
3551 mem_idx
= MIPS_HFLAG_UM
;
3554 t1
= tcg_temp_new();
3555 /* Do a byte access to possibly trigger a page
3556 fault with the unaligned address. */
3557 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3558 tcg_gen_andi_tl(t1
, t0
, 3);
3559 #ifdef TARGET_WORDS_BIGENDIAN
3560 tcg_gen_xori_tl(t1
, t1
, 3);
3562 tcg_gen_shli_tl(t1
, t1
, 3);
3563 tcg_gen_andi_tl(t0
, t0
, ~3);
3564 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3565 tcg_gen_shr_tl(t0
, t0
, t1
);
3566 tcg_gen_xori_tl(t1
, t1
, 31);
3567 t2
= tcg_const_tl(0xfffffffeull
);
3568 tcg_gen_shl_tl(t2
, t2
, t1
);
3569 gen_load_gpr(t1
, rt
);
3570 tcg_gen_and_tl(t1
, t1
, t2
);
3572 tcg_gen_or_tl(t0
, t0
, t1
);
3574 tcg_gen_ext32s_tl(t0
, t0
);
3575 gen_store_gpr(t0
, rt
);
3578 mem_idx
= MIPS_HFLAG_UM
;
3582 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3583 gen_store_gpr(t0
, rt
);
3589 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3590 uint32_t reg1
, uint32_t reg2
)
3592 TCGv taddr
= tcg_temp_new();
3593 TCGv_i64 tval
= tcg_temp_new_i64();
3594 TCGv tmp1
= tcg_temp_new();
3595 TCGv tmp2
= tcg_temp_new();
3597 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3598 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3599 #ifdef TARGET_WORDS_BIGENDIAN
3600 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3602 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3604 gen_store_gpr(tmp1
, reg1
);
3605 tcg_temp_free(tmp1
);
3606 gen_store_gpr(tmp2
, reg2
);
3607 tcg_temp_free(tmp2
);
3608 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3609 tcg_temp_free_i64(tval
);
3610 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3611 tcg_temp_free(taddr
);
3615 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
3616 int base
, int offset
)
3618 TCGv t0
= tcg_temp_new();
3619 TCGv t1
= tcg_temp_new();
3620 int mem_idx
= ctx
->mem_idx
;
3622 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3623 gen_load_gpr(t1
, rt
);
3625 #if defined(TARGET_MIPS64)
3627 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3628 ctx
->default_tcg_memop_mask
);
3631 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3634 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3638 mem_idx
= MIPS_HFLAG_UM
;
3641 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3642 ctx
->default_tcg_memop_mask
);
3645 mem_idx
= MIPS_HFLAG_UM
;
3648 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3649 ctx
->default_tcg_memop_mask
);
3652 mem_idx
= MIPS_HFLAG_UM
;
3655 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3658 mem_idx
= MIPS_HFLAG_UM
;
3661 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3664 mem_idx
= MIPS_HFLAG_UM
;
3667 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3675 /* Store conditional */
3676 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
3677 int base
, int16_t offset
)
3680 int mem_idx
= ctx
->mem_idx
;
3682 #ifdef CONFIG_USER_ONLY
3683 t0
= tcg_temp_local_new();
3684 t1
= tcg_temp_local_new();
3686 t0
= tcg_temp_new();
3687 t1
= tcg_temp_new();
3689 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3690 gen_load_gpr(t1
, rt
);
3692 #if defined(TARGET_MIPS64)
3695 op_st_scd(t1
, t0
, rt
, mem_idx
, ctx
);
3699 mem_idx
= MIPS_HFLAG_UM
;
3703 op_st_sc(t1
, t0
, rt
, mem_idx
, ctx
);
3710 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3711 uint32_t reg1
, uint32_t reg2
)
3713 TCGv taddr
= tcg_temp_local_new();
3714 TCGv lladdr
= tcg_temp_local_new();
3715 TCGv_i64 tval
= tcg_temp_new_i64();
3716 TCGv_i64 llval
= tcg_temp_new_i64();
3717 TCGv_i64 val
= tcg_temp_new_i64();
3718 TCGv tmp1
= tcg_temp_new();
3719 TCGv tmp2
= tcg_temp_new();
3720 TCGLabel
*lab_fail
= gen_new_label();
3721 TCGLabel
*lab_done
= gen_new_label();
3723 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3725 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3726 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3728 gen_load_gpr(tmp1
, reg1
);
3729 gen_load_gpr(tmp2
, reg2
);
3731 #ifdef TARGET_WORDS_BIGENDIAN
3732 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3734 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3737 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3738 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3739 ctx
->mem_idx
, MO_64
);
3741 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3743 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3745 gen_set_label(lab_fail
);
3748 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3750 gen_set_label(lab_done
);
3751 tcg_gen_movi_tl(lladdr
, -1);
3752 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3755 /* Load and store */
3756 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
3759 /* Don't do NOP if destination is zero: we must perform the actual
3764 TCGv_i32 fp0
= tcg_temp_new_i32();
3765 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3766 ctx
->default_tcg_memop_mask
);
3767 gen_store_fpr32(ctx
, fp0
, ft
);
3768 tcg_temp_free_i32(fp0
);
3773 TCGv_i32 fp0
= tcg_temp_new_i32();
3774 gen_load_fpr32(ctx
, fp0
, ft
);
3775 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3776 ctx
->default_tcg_memop_mask
);
3777 tcg_temp_free_i32(fp0
);
3782 TCGv_i64 fp0
= tcg_temp_new_i64();
3783 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3784 ctx
->default_tcg_memop_mask
);
3785 gen_store_fpr64(ctx
, fp0
, ft
);
3786 tcg_temp_free_i64(fp0
);
3791 TCGv_i64 fp0
= tcg_temp_new_i64();
3792 gen_load_fpr64(ctx
, fp0
, ft
);
3793 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3794 ctx
->default_tcg_memop_mask
);
3795 tcg_temp_free_i64(fp0
);
3799 MIPS_INVAL("flt_ldst");
3800 generate_exception_end(ctx
, EXCP_RI
);
3805 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3806 int rs
, int16_t imm
)
3808 TCGv t0
= tcg_temp_new();
3810 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3811 check_cp1_enabled(ctx
);
3815 check_insn(ctx
, ISA_MIPS2
);
3818 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3819 gen_flt_ldst(ctx
, op
, rt
, t0
);
3822 generate_exception_err(ctx
, EXCP_CpU
, 1);
3827 /* Arithmetic with immediate operand */
3828 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3829 int rt
, int rs
, int imm
)
3831 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3833 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3834 /* If no destination, treat it as a NOP.
3835 For addi, we must generate the overflow exception when needed. */
3841 TCGv t0
= tcg_temp_local_new();
3842 TCGv t1
= tcg_temp_new();
3843 TCGv t2
= tcg_temp_new();
3844 TCGLabel
*l1
= gen_new_label();
3846 gen_load_gpr(t1
, rs
);
3847 tcg_gen_addi_tl(t0
, t1
, uimm
);
3848 tcg_gen_ext32s_tl(t0
, t0
);
3850 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3851 tcg_gen_xori_tl(t2
, t0
, uimm
);
3852 tcg_gen_and_tl(t1
, t1
, t2
);
3854 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3856 /* operands of same sign, result different sign */
3857 generate_exception(ctx
, EXCP_OVERFLOW
);
3859 tcg_gen_ext32s_tl(t0
, t0
);
3860 gen_store_gpr(t0
, rt
);
3866 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3867 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3869 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3872 #if defined(TARGET_MIPS64)
3875 TCGv t0
= tcg_temp_local_new();
3876 TCGv t1
= tcg_temp_new();
3877 TCGv t2
= tcg_temp_new();
3878 TCGLabel
*l1
= gen_new_label();
3880 gen_load_gpr(t1
, rs
);
3881 tcg_gen_addi_tl(t0
, t1
, uimm
);
3883 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3884 tcg_gen_xori_tl(t2
, t0
, uimm
);
3885 tcg_gen_and_tl(t1
, t1
, t2
);
3887 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3889 /* operands of same sign, result different sign */
3890 generate_exception(ctx
, EXCP_OVERFLOW
);
3892 gen_store_gpr(t0
, rt
);
3898 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3900 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3907 /* Logic with immediate operand */
3908 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3909 int rt
, int rs
, int16_t imm
)
3914 /* If no destination, treat it as a NOP. */
3917 uimm
= (uint16_t)imm
;
3920 if (likely(rs
!= 0))
3921 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3923 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3927 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3929 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3932 if (likely(rs
!= 0))
3933 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3935 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3938 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3940 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3941 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3943 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3952 /* Set on less than with immediate operand */
3953 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3954 int rt
, int rs
, int16_t imm
)
3956 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3960 /* If no destination, treat it as a NOP. */
3963 t0
= tcg_temp_new();
3964 gen_load_gpr(t0
, rs
);
3967 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3970 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3976 /* Shifts with immediate operand */
3977 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3978 int rt
, int rs
, int16_t imm
)
3980 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3984 /* If no destination, treat it as a NOP. */
3988 t0
= tcg_temp_new();
3989 gen_load_gpr(t0
, rs
);
3992 tcg_gen_shli_tl(t0
, t0
, uimm
);
3993 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3996 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4000 tcg_gen_ext32u_tl(t0
, t0
);
4001 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4003 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4008 TCGv_i32 t1
= tcg_temp_new_i32();
4010 tcg_gen_trunc_tl_i32(t1
, t0
);
4011 tcg_gen_rotri_i32(t1
, t1
, uimm
);
4012 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
4013 tcg_temp_free_i32(t1
);
4015 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4018 #if defined(TARGET_MIPS64)
4020 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
4023 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4026 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4030 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
4032 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
4036 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4039 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4042 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4045 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4053 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
4054 int rd
, int rs
, int rt
)
4056 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
4057 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
4058 /* If no destination, treat it as a NOP.
4059 For add & sub, we must generate the overflow exception when needed. */
4066 TCGv t0
= tcg_temp_local_new();
4067 TCGv t1
= tcg_temp_new();
4068 TCGv t2
= tcg_temp_new();
4069 TCGLabel
*l1
= gen_new_label();
4071 gen_load_gpr(t1
, rs
);
4072 gen_load_gpr(t2
, rt
);
4073 tcg_gen_add_tl(t0
, t1
, t2
);
4074 tcg_gen_ext32s_tl(t0
, t0
);
4075 tcg_gen_xor_tl(t1
, t1
, t2
);
4076 tcg_gen_xor_tl(t2
, t0
, t2
);
4077 tcg_gen_andc_tl(t1
, t2
, t1
);
4079 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4081 /* operands of same sign, result different sign */
4082 generate_exception(ctx
, EXCP_OVERFLOW
);
4084 gen_store_gpr(t0
, rd
);
4089 if (rs
!= 0 && rt
!= 0) {
4090 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4091 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4092 } else if (rs
== 0 && rt
!= 0) {
4093 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4094 } else if (rs
!= 0 && rt
== 0) {
4095 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4097 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4102 TCGv t0
= tcg_temp_local_new();
4103 TCGv t1
= tcg_temp_new();
4104 TCGv t2
= tcg_temp_new();
4105 TCGLabel
*l1
= gen_new_label();
4107 gen_load_gpr(t1
, rs
);
4108 gen_load_gpr(t2
, rt
);
4109 tcg_gen_sub_tl(t0
, t1
, t2
);
4110 tcg_gen_ext32s_tl(t0
, t0
);
4111 tcg_gen_xor_tl(t2
, t1
, t2
);
4112 tcg_gen_xor_tl(t1
, t0
, t1
);
4113 tcg_gen_and_tl(t1
, t1
, t2
);
4115 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4117 /* operands of different sign, first operand and result different sign */
4118 generate_exception(ctx
, EXCP_OVERFLOW
);
4120 gen_store_gpr(t0
, rd
);
4125 if (rs
!= 0 && rt
!= 0) {
4126 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4127 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4128 } else if (rs
== 0 && rt
!= 0) {
4129 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4130 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4131 } else if (rs
!= 0 && rt
== 0) {
4132 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4134 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4137 #if defined(TARGET_MIPS64)
4140 TCGv t0
= tcg_temp_local_new();
4141 TCGv t1
= tcg_temp_new();
4142 TCGv t2
= tcg_temp_new();
4143 TCGLabel
*l1
= gen_new_label();
4145 gen_load_gpr(t1
, rs
);
4146 gen_load_gpr(t2
, rt
);
4147 tcg_gen_add_tl(t0
, t1
, t2
);
4148 tcg_gen_xor_tl(t1
, t1
, t2
);
4149 tcg_gen_xor_tl(t2
, t0
, t2
);
4150 tcg_gen_andc_tl(t1
, t2
, t1
);
4152 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4154 /* operands of same sign, result different sign */
4155 generate_exception(ctx
, EXCP_OVERFLOW
);
4157 gen_store_gpr(t0
, rd
);
4162 if (rs
!= 0 && rt
!= 0) {
4163 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4164 } else if (rs
== 0 && rt
!= 0) {
4165 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4166 } else if (rs
!= 0 && rt
== 0) {
4167 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4169 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4174 TCGv t0
= tcg_temp_local_new();
4175 TCGv t1
= tcg_temp_new();
4176 TCGv t2
= tcg_temp_new();
4177 TCGLabel
*l1
= gen_new_label();
4179 gen_load_gpr(t1
, rs
);
4180 gen_load_gpr(t2
, rt
);
4181 tcg_gen_sub_tl(t0
, t1
, t2
);
4182 tcg_gen_xor_tl(t2
, t1
, t2
);
4183 tcg_gen_xor_tl(t1
, t0
, t1
);
4184 tcg_gen_and_tl(t1
, t1
, t2
);
4186 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4188 /* operands of different sign, first operand and result different sign */
4189 generate_exception(ctx
, EXCP_OVERFLOW
);
4191 gen_store_gpr(t0
, rd
);
4196 if (rs
!= 0 && rt
!= 0) {
4197 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4198 } else if (rs
== 0 && rt
!= 0) {
4199 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4200 } else if (rs
!= 0 && rt
== 0) {
4201 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4203 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4208 if (likely(rs
!= 0 && rt
!= 0)) {
4209 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4210 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4212 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4218 /* Conditional move */
4219 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4220 int rd
, int rs
, int rt
)
4225 /* If no destination, treat it as a NOP. */
4229 t0
= tcg_temp_new();
4230 gen_load_gpr(t0
, rt
);
4231 t1
= tcg_const_tl(0);
4232 t2
= tcg_temp_new();
4233 gen_load_gpr(t2
, rs
);
4236 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4239 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4242 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4245 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4254 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4255 int rd
, int rs
, int rt
)
4258 /* If no destination, treat it as a NOP. */
4264 if (likely(rs
!= 0 && rt
!= 0)) {
4265 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4267 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4271 if (rs
!= 0 && rt
!= 0) {
4272 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4273 } else if (rs
== 0 && rt
!= 0) {
4274 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4275 } else if (rs
!= 0 && rt
== 0) {
4276 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4278 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4282 if (likely(rs
!= 0 && rt
!= 0)) {
4283 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4284 } else if (rs
== 0 && rt
!= 0) {
4285 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4286 } else if (rs
!= 0 && rt
== 0) {
4287 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4289 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4293 if (likely(rs
!= 0 && rt
!= 0)) {
4294 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4295 } else if (rs
== 0 && rt
!= 0) {
4296 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4297 } else if (rs
!= 0 && rt
== 0) {
4298 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4300 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4306 /* Set on lower than */
4307 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4308 int rd
, int rs
, int rt
)
4313 /* If no destination, treat it as a NOP. */
4317 t0
= tcg_temp_new();
4318 t1
= tcg_temp_new();
4319 gen_load_gpr(t0
, rs
);
4320 gen_load_gpr(t1
, rt
);
4323 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4326 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4334 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4335 int rd
, int rs
, int rt
)
4340 /* If no destination, treat it as a NOP.
4341 For add & sub, we must generate the overflow exception when needed. */
4345 t0
= tcg_temp_new();
4346 t1
= tcg_temp_new();
4347 gen_load_gpr(t0
, rs
);
4348 gen_load_gpr(t1
, rt
);
4351 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4352 tcg_gen_shl_tl(t0
, t1
, t0
);
4353 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4356 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4357 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4360 tcg_gen_ext32u_tl(t1
, t1
);
4361 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4362 tcg_gen_shr_tl(t0
, t1
, t0
);
4363 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4367 TCGv_i32 t2
= tcg_temp_new_i32();
4368 TCGv_i32 t3
= tcg_temp_new_i32();
4370 tcg_gen_trunc_tl_i32(t2
, t0
);
4371 tcg_gen_trunc_tl_i32(t3
, t1
);
4372 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4373 tcg_gen_rotr_i32(t2
, t3
, t2
);
4374 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4375 tcg_temp_free_i32(t2
);
4376 tcg_temp_free_i32(t3
);
4379 #if defined(TARGET_MIPS64)
4381 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4382 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4385 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4386 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4389 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4390 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4393 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4394 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4402 /* Copy GPR to and from TX79 HI1/LO1 register. */
4403 static void gen_HILO1_tx79(DisasContext
*ctx
, uint32_t opc
, int reg
)
4405 if (reg
== 0 && (opc
== MMI_OPC_MFHI1
|| opc
== MMI_OPC_MFLO1
)) {
4412 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[1]);
4415 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[1]);
4419 tcg_gen_mov_tl(cpu_HI
[1], cpu_gpr
[reg
]);
4421 tcg_gen_movi_tl(cpu_HI
[1], 0);
4426 tcg_gen_mov_tl(cpu_LO
[1], cpu_gpr
[reg
]);
4428 tcg_gen_movi_tl(cpu_LO
[1], 0);
4432 MIPS_INVAL("mfthilo1 TX79");
4433 generate_exception_end(ctx
, EXCP_RI
);
4438 /* Arithmetic on HI/LO registers */
4439 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4441 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
4452 #if defined(TARGET_MIPS64)
4454 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4458 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4462 #if defined(TARGET_MIPS64)
4464 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4468 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4473 #if defined(TARGET_MIPS64)
4475 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4479 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4482 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4487 #if defined(TARGET_MIPS64)
4489 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4493 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4496 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4502 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4505 TCGv t0
= tcg_const_tl(addr
);
4506 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4507 gen_store_gpr(t0
, reg
);
4511 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4517 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4520 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4521 addr
= addr_add(ctx
, pc
, offset
);
4522 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4526 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4527 addr
= addr_add(ctx
, pc
, offset
);
4528 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4530 #if defined(TARGET_MIPS64)
4533 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4534 addr
= addr_add(ctx
, pc
, offset
);
4535 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4539 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4542 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4543 addr
= addr_add(ctx
, pc
, offset
);
4544 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4549 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4550 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4551 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4554 #if defined(TARGET_MIPS64)
4555 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4556 case R6_OPC_LDPC
+ (1 << 16):
4557 case R6_OPC_LDPC
+ (2 << 16):
4558 case R6_OPC_LDPC
+ (3 << 16):
4560 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4561 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4562 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4566 MIPS_INVAL("OPC_PCREL");
4567 generate_exception_end(ctx
, EXCP_RI
);
4574 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4583 t0
= tcg_temp_new();
4584 t1
= tcg_temp_new();
4586 gen_load_gpr(t0
, rs
);
4587 gen_load_gpr(t1
, rt
);
4592 TCGv t2
= tcg_temp_new();
4593 TCGv t3
= tcg_temp_new();
4594 tcg_gen_ext32s_tl(t0
, t0
);
4595 tcg_gen_ext32s_tl(t1
, t1
);
4596 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4597 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4598 tcg_gen_and_tl(t2
, t2
, t3
);
4599 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4600 tcg_gen_or_tl(t2
, t2
, t3
);
4601 tcg_gen_movi_tl(t3
, 0);
4602 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4603 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4604 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4611 TCGv t2
= tcg_temp_new();
4612 TCGv t3
= tcg_temp_new();
4613 tcg_gen_ext32s_tl(t0
, t0
);
4614 tcg_gen_ext32s_tl(t1
, t1
);
4615 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4616 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4617 tcg_gen_and_tl(t2
, t2
, t3
);
4618 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4619 tcg_gen_or_tl(t2
, t2
, t3
);
4620 tcg_gen_movi_tl(t3
, 0);
4621 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4622 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4623 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4630 TCGv t2
= tcg_const_tl(0);
4631 TCGv t3
= tcg_const_tl(1);
4632 tcg_gen_ext32u_tl(t0
, t0
);
4633 tcg_gen_ext32u_tl(t1
, t1
);
4634 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4635 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4636 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4643 TCGv t2
= tcg_const_tl(0);
4644 TCGv t3
= tcg_const_tl(1);
4645 tcg_gen_ext32u_tl(t0
, t0
);
4646 tcg_gen_ext32u_tl(t1
, t1
);
4647 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4648 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4649 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4656 TCGv_i32 t2
= tcg_temp_new_i32();
4657 TCGv_i32 t3
= tcg_temp_new_i32();
4658 tcg_gen_trunc_tl_i32(t2
, t0
);
4659 tcg_gen_trunc_tl_i32(t3
, t1
);
4660 tcg_gen_mul_i32(t2
, t2
, t3
);
4661 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4662 tcg_temp_free_i32(t2
);
4663 tcg_temp_free_i32(t3
);
4668 TCGv_i32 t2
= tcg_temp_new_i32();
4669 TCGv_i32 t3
= tcg_temp_new_i32();
4670 tcg_gen_trunc_tl_i32(t2
, t0
);
4671 tcg_gen_trunc_tl_i32(t3
, t1
);
4672 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4673 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4674 tcg_temp_free_i32(t2
);
4675 tcg_temp_free_i32(t3
);
4680 TCGv_i32 t2
= tcg_temp_new_i32();
4681 TCGv_i32 t3
= tcg_temp_new_i32();
4682 tcg_gen_trunc_tl_i32(t2
, t0
);
4683 tcg_gen_trunc_tl_i32(t3
, t1
);
4684 tcg_gen_mul_i32(t2
, t2
, t3
);
4685 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4686 tcg_temp_free_i32(t2
);
4687 tcg_temp_free_i32(t3
);
4692 TCGv_i32 t2
= tcg_temp_new_i32();
4693 TCGv_i32 t3
= tcg_temp_new_i32();
4694 tcg_gen_trunc_tl_i32(t2
, t0
);
4695 tcg_gen_trunc_tl_i32(t3
, t1
);
4696 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4697 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4698 tcg_temp_free_i32(t2
);
4699 tcg_temp_free_i32(t3
);
4702 #if defined(TARGET_MIPS64)
4705 TCGv t2
= tcg_temp_new();
4706 TCGv t3
= tcg_temp_new();
4707 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4708 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4709 tcg_gen_and_tl(t2
, t2
, t3
);
4710 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4711 tcg_gen_or_tl(t2
, t2
, t3
);
4712 tcg_gen_movi_tl(t3
, 0);
4713 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4714 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4721 TCGv t2
= tcg_temp_new();
4722 TCGv t3
= tcg_temp_new();
4723 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4724 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4725 tcg_gen_and_tl(t2
, t2
, t3
);
4726 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4727 tcg_gen_or_tl(t2
, t2
, t3
);
4728 tcg_gen_movi_tl(t3
, 0);
4729 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4730 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4737 TCGv t2
= tcg_const_tl(0);
4738 TCGv t3
= tcg_const_tl(1);
4739 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4740 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4747 TCGv t2
= tcg_const_tl(0);
4748 TCGv t3
= tcg_const_tl(1);
4749 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4750 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4756 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4760 TCGv t2
= tcg_temp_new();
4761 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4766 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4770 TCGv t2
= tcg_temp_new();
4771 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4777 MIPS_INVAL("r6 mul/div");
4778 generate_exception_end(ctx
, EXCP_RI
);
4786 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
4790 t0
= tcg_temp_new();
4791 t1
= tcg_temp_new();
4793 gen_load_gpr(t0
, rs
);
4794 gen_load_gpr(t1
, rt
);
4799 TCGv t2
= tcg_temp_new();
4800 TCGv t3
= tcg_temp_new();
4801 tcg_gen_ext32s_tl(t0
, t0
);
4802 tcg_gen_ext32s_tl(t1
, t1
);
4803 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4804 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4805 tcg_gen_and_tl(t2
, t2
, t3
);
4806 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4807 tcg_gen_or_tl(t2
, t2
, t3
);
4808 tcg_gen_movi_tl(t3
, 0);
4809 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4810 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4811 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4812 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4813 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4820 TCGv t2
= tcg_const_tl(0);
4821 TCGv t3
= tcg_const_tl(1);
4822 tcg_gen_ext32u_tl(t0
, t0
);
4823 tcg_gen_ext32u_tl(t1
, t1
);
4824 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4825 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4826 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4827 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4828 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4834 MIPS_INVAL("div1 TX79");
4835 generate_exception_end(ctx
, EXCP_RI
);
4843 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4844 int acc
, int rs
, int rt
)
4848 t0
= tcg_temp_new();
4849 t1
= tcg_temp_new();
4851 gen_load_gpr(t0
, rs
);
4852 gen_load_gpr(t1
, rt
);
4861 TCGv t2
= tcg_temp_new();
4862 TCGv t3
= tcg_temp_new();
4863 tcg_gen_ext32s_tl(t0
, t0
);
4864 tcg_gen_ext32s_tl(t1
, t1
);
4865 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4866 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4867 tcg_gen_and_tl(t2
, t2
, t3
);
4868 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4869 tcg_gen_or_tl(t2
, t2
, t3
);
4870 tcg_gen_movi_tl(t3
, 0);
4871 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4872 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4873 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4874 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4875 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4882 TCGv t2
= tcg_const_tl(0);
4883 TCGv t3
= tcg_const_tl(1);
4884 tcg_gen_ext32u_tl(t0
, t0
);
4885 tcg_gen_ext32u_tl(t1
, t1
);
4886 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4887 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4888 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4889 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4890 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4897 TCGv_i32 t2
= tcg_temp_new_i32();
4898 TCGv_i32 t3
= tcg_temp_new_i32();
4899 tcg_gen_trunc_tl_i32(t2
, t0
);
4900 tcg_gen_trunc_tl_i32(t3
, t1
);
4901 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4902 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4903 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4904 tcg_temp_free_i32(t2
);
4905 tcg_temp_free_i32(t3
);
4910 TCGv_i32 t2
= tcg_temp_new_i32();
4911 TCGv_i32 t3
= tcg_temp_new_i32();
4912 tcg_gen_trunc_tl_i32(t2
, t0
);
4913 tcg_gen_trunc_tl_i32(t3
, t1
);
4914 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4915 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4916 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4917 tcg_temp_free_i32(t2
);
4918 tcg_temp_free_i32(t3
);
4921 #if defined(TARGET_MIPS64)
4924 TCGv t2
= tcg_temp_new();
4925 TCGv t3
= tcg_temp_new();
4926 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4927 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4928 tcg_gen_and_tl(t2
, t2
, t3
);
4929 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4930 tcg_gen_or_tl(t2
, t2
, t3
);
4931 tcg_gen_movi_tl(t3
, 0);
4932 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4933 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4934 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4941 TCGv t2
= tcg_const_tl(0);
4942 TCGv t3
= tcg_const_tl(1);
4943 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4944 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4945 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4951 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4954 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4959 TCGv_i64 t2
= tcg_temp_new_i64();
4960 TCGv_i64 t3
= tcg_temp_new_i64();
4962 tcg_gen_ext_tl_i64(t2
, t0
);
4963 tcg_gen_ext_tl_i64(t3
, t1
);
4964 tcg_gen_mul_i64(t2
, t2
, t3
);
4965 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4966 tcg_gen_add_i64(t2
, t2
, t3
);
4967 tcg_temp_free_i64(t3
);
4968 gen_move_low32(cpu_LO
[acc
], t2
);
4969 gen_move_high32(cpu_HI
[acc
], t2
);
4970 tcg_temp_free_i64(t2
);
4975 TCGv_i64 t2
= tcg_temp_new_i64();
4976 TCGv_i64 t3
= tcg_temp_new_i64();
4978 tcg_gen_ext32u_tl(t0
, t0
);
4979 tcg_gen_ext32u_tl(t1
, t1
);
4980 tcg_gen_extu_tl_i64(t2
, t0
);
4981 tcg_gen_extu_tl_i64(t3
, t1
);
4982 tcg_gen_mul_i64(t2
, t2
, t3
);
4983 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4984 tcg_gen_add_i64(t2
, t2
, t3
);
4985 tcg_temp_free_i64(t3
);
4986 gen_move_low32(cpu_LO
[acc
], t2
);
4987 gen_move_high32(cpu_HI
[acc
], t2
);
4988 tcg_temp_free_i64(t2
);
4993 TCGv_i64 t2
= tcg_temp_new_i64();
4994 TCGv_i64 t3
= tcg_temp_new_i64();
4996 tcg_gen_ext_tl_i64(t2
, t0
);
4997 tcg_gen_ext_tl_i64(t3
, t1
);
4998 tcg_gen_mul_i64(t2
, t2
, t3
);
4999 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5000 tcg_gen_sub_i64(t2
, t3
, t2
);
5001 tcg_temp_free_i64(t3
);
5002 gen_move_low32(cpu_LO
[acc
], t2
);
5003 gen_move_high32(cpu_HI
[acc
], t2
);
5004 tcg_temp_free_i64(t2
);
5009 TCGv_i64 t2
= tcg_temp_new_i64();
5010 TCGv_i64 t3
= tcg_temp_new_i64();
5012 tcg_gen_ext32u_tl(t0
, t0
);
5013 tcg_gen_ext32u_tl(t1
, t1
);
5014 tcg_gen_extu_tl_i64(t2
, t0
);
5015 tcg_gen_extu_tl_i64(t3
, t1
);
5016 tcg_gen_mul_i64(t2
, t2
, t3
);
5017 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5018 tcg_gen_sub_i64(t2
, t3
, t2
);
5019 tcg_temp_free_i64(t3
);
5020 gen_move_low32(cpu_LO
[acc
], t2
);
5021 gen_move_high32(cpu_HI
[acc
], t2
);
5022 tcg_temp_free_i64(t2
);
5026 MIPS_INVAL("mul/div");
5027 generate_exception_end(ctx
, EXCP_RI
);
5036 * These MULT[U] and MADD[U] instructions implemented in for example
5037 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
5038 * architectures are special three-operand variants with the syntax
5040 * MULT[U][1] rd, rs, rt
5044 * (rd, LO, HI) <- rs * rt
5048 * MADD[U][1] rd, rs, rt
5052 * (rd, LO, HI) <- (LO, HI) + rs * rt
5054 * where the low-order 32-bits of the result is placed into both the
5055 * GPR rd and the special register LO. The high-order 32-bits of the
5056 * result is placed into the special register HI.
5058 * If the GPR rd is omitted in assembly language, it is taken to be 0,
5059 * which is the zero register that always reads as 0.
5061 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
5062 int rd
, int rs
, int rt
)
5064 TCGv t0
= tcg_temp_new();
5065 TCGv t1
= tcg_temp_new();
5068 gen_load_gpr(t0
, rs
);
5069 gen_load_gpr(t1
, rt
);
5077 TCGv_i32 t2
= tcg_temp_new_i32();
5078 TCGv_i32 t3
= tcg_temp_new_i32();
5079 tcg_gen_trunc_tl_i32(t2
, t0
);
5080 tcg_gen_trunc_tl_i32(t3
, t1
);
5081 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5083 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5085 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5086 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5087 tcg_temp_free_i32(t2
);
5088 tcg_temp_free_i32(t3
);
5091 case MMI_OPC_MULTU1
:
5096 TCGv_i32 t2
= tcg_temp_new_i32();
5097 TCGv_i32 t3
= tcg_temp_new_i32();
5098 tcg_gen_trunc_tl_i32(t2
, t0
);
5099 tcg_gen_trunc_tl_i32(t3
, t1
);
5100 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5102 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5104 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5105 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5106 tcg_temp_free_i32(t2
);
5107 tcg_temp_free_i32(t3
);
5115 TCGv_i64 t2
= tcg_temp_new_i64();
5116 TCGv_i64 t3
= tcg_temp_new_i64();
5118 tcg_gen_ext_tl_i64(t2
, t0
);
5119 tcg_gen_ext_tl_i64(t3
, t1
);
5120 tcg_gen_mul_i64(t2
, t2
, t3
);
5121 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5122 tcg_gen_add_i64(t2
, t2
, t3
);
5123 tcg_temp_free_i64(t3
);
5124 gen_move_low32(cpu_LO
[acc
], t2
);
5125 gen_move_high32(cpu_HI
[acc
], t2
);
5127 gen_move_low32(cpu_gpr
[rd
], t2
);
5129 tcg_temp_free_i64(t2
);
5132 case MMI_OPC_MADDU1
:
5137 TCGv_i64 t2
= tcg_temp_new_i64();
5138 TCGv_i64 t3
= tcg_temp_new_i64();
5140 tcg_gen_ext32u_tl(t0
, t0
);
5141 tcg_gen_ext32u_tl(t1
, t1
);
5142 tcg_gen_extu_tl_i64(t2
, t0
);
5143 tcg_gen_extu_tl_i64(t3
, t1
);
5144 tcg_gen_mul_i64(t2
, t2
, t3
);
5145 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5146 tcg_gen_add_i64(t2
, t2
, t3
);
5147 tcg_temp_free_i64(t3
);
5148 gen_move_low32(cpu_LO
[acc
], t2
);
5149 gen_move_high32(cpu_HI
[acc
], t2
);
5151 gen_move_low32(cpu_gpr
[rd
], t2
);
5153 tcg_temp_free_i64(t2
);
5157 MIPS_INVAL("mul/madd TXx9");
5158 generate_exception_end(ctx
, EXCP_RI
);
5167 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
5168 int rd
, int rs
, int rt
)
5170 TCGv t0
= tcg_temp_new();
5171 TCGv t1
= tcg_temp_new();
5173 gen_load_gpr(t0
, rs
);
5174 gen_load_gpr(t1
, rt
);
5177 case OPC_VR54XX_MULS
:
5178 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5180 case OPC_VR54XX_MULSU
:
5181 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5183 case OPC_VR54XX_MACC
:
5184 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5186 case OPC_VR54XX_MACCU
:
5187 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5189 case OPC_VR54XX_MSAC
:
5190 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5192 case OPC_VR54XX_MSACU
:
5193 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5195 case OPC_VR54XX_MULHI
:
5196 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5198 case OPC_VR54XX_MULHIU
:
5199 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5201 case OPC_VR54XX_MULSHI
:
5202 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5204 case OPC_VR54XX_MULSHIU
:
5205 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5207 case OPC_VR54XX_MACCHI
:
5208 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5210 case OPC_VR54XX_MACCHIU
:
5211 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5213 case OPC_VR54XX_MSACHI
:
5214 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5216 case OPC_VR54XX_MSACHIU
:
5217 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5220 MIPS_INVAL("mul vr54xx");
5221 generate_exception_end(ctx
, EXCP_RI
);
5224 gen_store_gpr(t0
, rd
);
5231 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
5241 gen_load_gpr(t0
, rs
);
5246 #if defined(TARGET_MIPS64)
5250 tcg_gen_not_tl(t0
, t0
);
5259 tcg_gen_ext32u_tl(t0
, t0
);
5260 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5261 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5263 #if defined(TARGET_MIPS64)
5268 tcg_gen_clzi_i64(t0
, t0
, 64);
5274 /* Godson integer instructions */
5275 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5276 int rd
, int rs
, int rt
)
5288 case OPC_MULTU_G_2E
:
5289 case OPC_MULTU_G_2F
:
5290 #if defined(TARGET_MIPS64)
5291 case OPC_DMULT_G_2E
:
5292 case OPC_DMULT_G_2F
:
5293 case OPC_DMULTU_G_2E
:
5294 case OPC_DMULTU_G_2F
:
5296 t0
= tcg_temp_new();
5297 t1
= tcg_temp_new();
5300 t0
= tcg_temp_local_new();
5301 t1
= tcg_temp_local_new();
5305 gen_load_gpr(t0
, rs
);
5306 gen_load_gpr(t1
, rt
);
5311 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5312 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5314 case OPC_MULTU_G_2E
:
5315 case OPC_MULTU_G_2F
:
5316 tcg_gen_ext32u_tl(t0
, t0
);
5317 tcg_gen_ext32u_tl(t1
, t1
);
5318 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5319 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5324 TCGLabel
*l1
= gen_new_label();
5325 TCGLabel
*l2
= gen_new_label();
5326 TCGLabel
*l3
= gen_new_label();
5327 tcg_gen_ext32s_tl(t0
, t0
);
5328 tcg_gen_ext32s_tl(t1
, t1
);
5329 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5330 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5333 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5334 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5335 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5338 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5339 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5346 TCGLabel
*l1
= gen_new_label();
5347 TCGLabel
*l2
= gen_new_label();
5348 tcg_gen_ext32u_tl(t0
, t0
);
5349 tcg_gen_ext32u_tl(t1
, t1
);
5350 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5351 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5354 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5355 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5362 TCGLabel
*l1
= gen_new_label();
5363 TCGLabel
*l2
= gen_new_label();
5364 TCGLabel
*l3
= gen_new_label();
5365 tcg_gen_ext32u_tl(t0
, t0
);
5366 tcg_gen_ext32u_tl(t1
, t1
);
5367 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5368 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5369 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5371 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5374 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5375 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5382 TCGLabel
*l1
= gen_new_label();
5383 TCGLabel
*l2
= gen_new_label();
5384 tcg_gen_ext32u_tl(t0
, t0
);
5385 tcg_gen_ext32u_tl(t1
, t1
);
5386 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5387 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5390 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5391 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5395 #if defined(TARGET_MIPS64)
5396 case OPC_DMULT_G_2E
:
5397 case OPC_DMULT_G_2F
:
5398 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5400 case OPC_DMULTU_G_2E
:
5401 case OPC_DMULTU_G_2F
:
5402 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5407 TCGLabel
*l1
= gen_new_label();
5408 TCGLabel
*l2
= gen_new_label();
5409 TCGLabel
*l3
= gen_new_label();
5410 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5411 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5414 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5415 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5416 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5419 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5423 case OPC_DDIVU_G_2E
:
5424 case OPC_DDIVU_G_2F
:
5426 TCGLabel
*l1
= gen_new_label();
5427 TCGLabel
*l2
= gen_new_label();
5428 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5429 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5432 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5439 TCGLabel
*l1
= gen_new_label();
5440 TCGLabel
*l2
= gen_new_label();
5441 TCGLabel
*l3
= gen_new_label();
5442 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5443 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5444 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5446 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5449 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5453 case OPC_DMODU_G_2E
:
5454 case OPC_DMODU_G_2F
:
5456 TCGLabel
*l1
= gen_new_label();
5457 TCGLabel
*l2
= gen_new_label();
5458 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5459 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5462 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5473 /* Loongson multimedia instructions */
5474 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5476 uint32_t opc
, shift_max
;
5479 opc
= MASK_LMI(ctx
->opcode
);
5485 t0
= tcg_temp_local_new_i64();
5486 t1
= tcg_temp_local_new_i64();
5489 t0
= tcg_temp_new_i64();
5490 t1
= tcg_temp_new_i64();
5494 check_cp1_enabled(ctx
);
5495 gen_load_fpr64(ctx
, t0
, rs
);
5496 gen_load_fpr64(ctx
, t1
, rt
);
5498 #define LMI_HELPER(UP, LO) \
5499 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5500 #define LMI_HELPER_1(UP, LO) \
5501 case OPC_##UP: gen_helper_##LO(t0, t0); break
5502 #define LMI_DIRECT(UP, LO, OP) \
5503 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5506 LMI_HELPER(PADDSH
, paddsh
);
5507 LMI_HELPER(PADDUSH
, paddush
);
5508 LMI_HELPER(PADDH
, paddh
);
5509 LMI_HELPER(PADDW
, paddw
);
5510 LMI_HELPER(PADDSB
, paddsb
);
5511 LMI_HELPER(PADDUSB
, paddusb
);
5512 LMI_HELPER(PADDB
, paddb
);
5514 LMI_HELPER(PSUBSH
, psubsh
);
5515 LMI_HELPER(PSUBUSH
, psubush
);
5516 LMI_HELPER(PSUBH
, psubh
);
5517 LMI_HELPER(PSUBW
, psubw
);
5518 LMI_HELPER(PSUBSB
, psubsb
);
5519 LMI_HELPER(PSUBUSB
, psubusb
);
5520 LMI_HELPER(PSUBB
, psubb
);
5522 LMI_HELPER(PSHUFH
, pshufh
);
5523 LMI_HELPER(PACKSSWH
, packsswh
);
5524 LMI_HELPER(PACKSSHB
, packsshb
);
5525 LMI_HELPER(PACKUSHB
, packushb
);
5527 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5528 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5529 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5530 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5531 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5532 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5534 LMI_HELPER(PAVGH
, pavgh
);
5535 LMI_HELPER(PAVGB
, pavgb
);
5536 LMI_HELPER(PMAXSH
, pmaxsh
);
5537 LMI_HELPER(PMINSH
, pminsh
);
5538 LMI_HELPER(PMAXUB
, pmaxub
);
5539 LMI_HELPER(PMINUB
, pminub
);
5541 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5542 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5543 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5544 LMI_HELPER(PCMPGTH
, pcmpgth
);
5545 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5546 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5548 LMI_HELPER(PSLLW
, psllw
);
5549 LMI_HELPER(PSLLH
, psllh
);
5550 LMI_HELPER(PSRLW
, psrlw
);
5551 LMI_HELPER(PSRLH
, psrlh
);
5552 LMI_HELPER(PSRAW
, psraw
);
5553 LMI_HELPER(PSRAH
, psrah
);
5555 LMI_HELPER(PMULLH
, pmullh
);
5556 LMI_HELPER(PMULHH
, pmulhh
);
5557 LMI_HELPER(PMULHUH
, pmulhuh
);
5558 LMI_HELPER(PMADDHW
, pmaddhw
);
5560 LMI_HELPER(PASUBUB
, pasubub
);
5561 LMI_HELPER_1(BIADD
, biadd
);
5562 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5564 LMI_DIRECT(PADDD
, paddd
, add
);
5565 LMI_DIRECT(PSUBD
, psubd
, sub
);
5566 LMI_DIRECT(XOR_CP2
, xor, xor);
5567 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5568 LMI_DIRECT(AND_CP2
, and, and);
5569 LMI_DIRECT(OR_CP2
, or, or);
5572 tcg_gen_andc_i64(t0
, t1
, t0
);
5576 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5579 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5582 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5585 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5589 tcg_gen_andi_i64(t1
, t1
, 3);
5590 tcg_gen_shli_i64(t1
, t1
, 4);
5591 tcg_gen_shr_i64(t0
, t0
, t1
);
5592 tcg_gen_ext16u_i64(t0
, t0
);
5596 tcg_gen_add_i64(t0
, t0
, t1
);
5597 tcg_gen_ext32s_i64(t0
, t0
);
5600 tcg_gen_sub_i64(t0
, t0
, t1
);
5601 tcg_gen_ext32s_i64(t0
, t0
);
5623 /* Make sure shift count isn't TCG undefined behaviour. */
5624 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5629 tcg_gen_shl_i64(t0
, t0
, t1
);
5633 /* Since SRA is UndefinedResult without sign-extended inputs,
5634 we can treat SRA and DSRA the same. */
5635 tcg_gen_sar_i64(t0
, t0
, t1
);
5638 /* We want to shift in zeros for SRL; zero-extend first. */
5639 tcg_gen_ext32u_i64(t0
, t0
);
5642 tcg_gen_shr_i64(t0
, t0
, t1
);
5646 if (shift_max
== 32) {
5647 tcg_gen_ext32s_i64(t0
, t0
);
5650 /* Shifts larger than MAX produce zero. */
5651 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5652 tcg_gen_neg_i64(t1
, t1
);
5653 tcg_gen_and_i64(t0
, t0
, t1
);
5659 TCGv_i64 t2
= tcg_temp_new_i64();
5660 TCGLabel
*lab
= gen_new_label();
5662 tcg_gen_mov_i64(t2
, t0
);
5663 tcg_gen_add_i64(t0
, t1
, t2
);
5664 if (opc
== OPC_ADD_CP2
) {
5665 tcg_gen_ext32s_i64(t0
, t0
);
5667 tcg_gen_xor_i64(t1
, t1
, t2
);
5668 tcg_gen_xor_i64(t2
, t2
, t0
);
5669 tcg_gen_andc_i64(t1
, t2
, t1
);
5670 tcg_temp_free_i64(t2
);
5671 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5672 generate_exception(ctx
, EXCP_OVERFLOW
);
5680 TCGv_i64 t2
= tcg_temp_new_i64();
5681 TCGLabel
*lab
= gen_new_label();
5683 tcg_gen_mov_i64(t2
, t0
);
5684 tcg_gen_sub_i64(t0
, t1
, t2
);
5685 if (opc
== OPC_SUB_CP2
) {
5686 tcg_gen_ext32s_i64(t0
, t0
);
5688 tcg_gen_xor_i64(t1
, t1
, t2
);
5689 tcg_gen_xor_i64(t2
, t2
, t0
);
5690 tcg_gen_and_i64(t1
, t1
, t2
);
5691 tcg_temp_free_i64(t2
);
5692 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5693 generate_exception(ctx
, EXCP_OVERFLOW
);
5699 tcg_gen_ext32u_i64(t0
, t0
);
5700 tcg_gen_ext32u_i64(t1
, t1
);
5701 tcg_gen_mul_i64(t0
, t0
, t1
);
5710 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5711 FD field is the CC field? */
5713 MIPS_INVAL("loongson_cp2");
5714 generate_exception_end(ctx
, EXCP_RI
);
5721 gen_store_fpr64(ctx
, t0
, rd
);
5723 tcg_temp_free_i64(t0
);
5724 tcg_temp_free_i64(t1
);
5728 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5729 int rs
, int rt
, int16_t imm
)
5732 TCGv t0
= tcg_temp_new();
5733 TCGv t1
= tcg_temp_new();
5736 /* Load needed operands */
5744 /* Compare two registers */
5746 gen_load_gpr(t0
, rs
);
5747 gen_load_gpr(t1
, rt
);
5757 /* Compare register to immediate */
5758 if (rs
!= 0 || imm
!= 0) {
5759 gen_load_gpr(t0
, rs
);
5760 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5767 case OPC_TEQ
: /* rs == rs */
5768 case OPC_TEQI
: /* r0 == 0 */
5769 case OPC_TGE
: /* rs >= rs */
5770 case OPC_TGEI
: /* r0 >= 0 */
5771 case OPC_TGEU
: /* rs >= rs unsigned */
5772 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5774 generate_exception_end(ctx
, EXCP_TRAP
);
5776 case OPC_TLT
: /* rs < rs */
5777 case OPC_TLTI
: /* r0 < 0 */
5778 case OPC_TLTU
: /* rs < rs unsigned */
5779 case OPC_TLTIU
: /* r0 < 0 unsigned */
5780 case OPC_TNE
: /* rs != rs */
5781 case OPC_TNEI
: /* r0 != 0 */
5782 /* Never trap: treat as NOP. */
5786 TCGLabel
*l1
= gen_new_label();
5791 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5795 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5799 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5803 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5807 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5811 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5814 generate_exception(ctx
, EXCP_TRAP
);
5821 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5823 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5827 #ifndef CONFIG_USER_ONLY
5828 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5834 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5836 if (use_goto_tb(ctx
, dest
)) {
5839 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5842 if (ctx
->base
.singlestep_enabled
) {
5843 save_cpu_state(ctx
, 0);
5844 gen_helper_raise_exception_debug(cpu_env
);
5846 tcg_gen_lookup_and_goto_ptr();
5850 /* Branches (before delay slot) */
5851 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5853 int rs
, int rt
, int32_t offset
,
5856 target_ulong btgt
= -1;
5858 int bcond_compute
= 0;
5859 TCGv t0
= tcg_temp_new();
5860 TCGv t1
= tcg_temp_new();
5862 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5863 #ifdef MIPS_DEBUG_DISAS
5864 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5865 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5867 generate_exception_end(ctx
, EXCP_RI
);
5871 /* Load needed operands */
5877 /* Compare two registers */
5879 gen_load_gpr(t0
, rs
);
5880 gen_load_gpr(t1
, rt
);
5883 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5897 /* Compare to zero */
5899 gen_load_gpr(t0
, rs
);
5902 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5905 #if defined(TARGET_MIPS64)
5907 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5909 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5912 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5917 /* Jump to immediate */
5918 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5923 /* Jump to register */
5924 if (offset
!= 0 && offset
!= 16) {
5925 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5926 others are reserved. */
5927 MIPS_INVAL("jump hint");
5928 generate_exception_end(ctx
, EXCP_RI
);
5931 gen_load_gpr(btarget
, rs
);
5934 MIPS_INVAL("branch/jump");
5935 generate_exception_end(ctx
, EXCP_RI
);
5938 if (bcond_compute
== 0) {
5939 /* No condition to be computed */
5941 case OPC_BEQ
: /* rx == rx */
5942 case OPC_BEQL
: /* rx == rx likely */
5943 case OPC_BGEZ
: /* 0 >= 0 */
5944 case OPC_BGEZL
: /* 0 >= 0 likely */
5945 case OPC_BLEZ
: /* 0 <= 0 */
5946 case OPC_BLEZL
: /* 0 <= 0 likely */
5948 ctx
->hflags
|= MIPS_HFLAG_B
;
5950 case OPC_BGEZAL
: /* 0 >= 0 */
5951 case OPC_BGEZALL
: /* 0 >= 0 likely */
5952 /* Always take and link */
5954 ctx
->hflags
|= MIPS_HFLAG_B
;
5956 case OPC_BNE
: /* rx != rx */
5957 case OPC_BGTZ
: /* 0 > 0 */
5958 case OPC_BLTZ
: /* 0 < 0 */
5961 case OPC_BLTZAL
: /* 0 < 0 */
5962 /* Handle as an unconditional branch to get correct delay
5965 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5966 ctx
->hflags
|= MIPS_HFLAG_B
;
5968 case OPC_BLTZALL
: /* 0 < 0 likely */
5969 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5970 /* Skip the instruction in the delay slot */
5971 ctx
->base
.pc_next
+= 4;
5973 case OPC_BNEL
: /* rx != rx likely */
5974 case OPC_BGTZL
: /* 0 > 0 likely */
5975 case OPC_BLTZL
: /* 0 < 0 likely */
5976 /* Skip the instruction in the delay slot */
5977 ctx
->base
.pc_next
+= 4;
5980 ctx
->hflags
|= MIPS_HFLAG_B
;
5983 ctx
->hflags
|= MIPS_HFLAG_BX
;
5987 ctx
->hflags
|= MIPS_HFLAG_B
;
5990 ctx
->hflags
|= MIPS_HFLAG_BR
;
5994 ctx
->hflags
|= MIPS_HFLAG_BR
;
5997 MIPS_INVAL("branch/jump");
5998 generate_exception_end(ctx
, EXCP_RI
);
6004 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6007 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6010 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6013 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6016 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6019 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6022 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6026 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6030 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6033 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6036 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6039 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6042 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6045 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6048 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6050 #if defined(TARGET_MIPS64)
6052 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6056 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6059 ctx
->hflags
|= MIPS_HFLAG_BC
;
6062 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6065 ctx
->hflags
|= MIPS_HFLAG_BL
;
6068 MIPS_INVAL("conditional branch/jump");
6069 generate_exception_end(ctx
, EXCP_RI
);
6074 ctx
->btarget
= btgt
;
6076 switch (delayslot_size
) {
6078 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6081 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
6086 int post_delay
= insn_bytes
+ delayslot_size
;
6087 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6089 tcg_gen_movi_tl(cpu_gpr
[blink
],
6090 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6094 if (insn_bytes
== 2)
6095 ctx
->hflags
|= MIPS_HFLAG_B16
;
6101 /* nanoMIPS Branches */
6102 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
6104 int rs
, int rt
, int32_t offset
)
6106 target_ulong btgt
= -1;
6107 int bcond_compute
= 0;
6108 TCGv t0
= tcg_temp_new();
6109 TCGv t1
= tcg_temp_new();
6111 /* Load needed operands */
6115 /* Compare two registers */
6117 gen_load_gpr(t0
, rs
);
6118 gen_load_gpr(t1
, rt
);
6121 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6124 /* Compare to zero */
6126 gen_load_gpr(t0
, rs
);
6129 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6132 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
6134 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6138 /* Jump to register */
6139 if (offset
!= 0 && offset
!= 16) {
6140 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
6141 others are reserved. */
6142 MIPS_INVAL("jump hint");
6143 generate_exception_end(ctx
, EXCP_RI
);
6146 gen_load_gpr(btarget
, rs
);
6149 MIPS_INVAL("branch/jump");
6150 generate_exception_end(ctx
, EXCP_RI
);
6153 if (bcond_compute
== 0) {
6154 /* No condition to be computed */
6156 case OPC_BEQ
: /* rx == rx */
6158 ctx
->hflags
|= MIPS_HFLAG_B
;
6160 case OPC_BGEZAL
: /* 0 >= 0 */
6161 /* Always take and link */
6162 tcg_gen_movi_tl(cpu_gpr
[31],
6163 ctx
->base
.pc_next
+ insn_bytes
);
6164 ctx
->hflags
|= MIPS_HFLAG_B
;
6166 case OPC_BNE
: /* rx != rx */
6167 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6168 /* Skip the instruction in the delay slot */
6169 ctx
->base
.pc_next
+= 4;
6172 ctx
->hflags
|= MIPS_HFLAG_BR
;
6176 tcg_gen_movi_tl(cpu_gpr
[rt
],
6177 ctx
->base
.pc_next
+ insn_bytes
);
6179 ctx
->hflags
|= MIPS_HFLAG_BR
;
6182 MIPS_INVAL("branch/jump");
6183 generate_exception_end(ctx
, EXCP_RI
);
6189 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6192 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6195 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6196 tcg_gen_movi_tl(cpu_gpr
[31],
6197 ctx
->base
.pc_next
+ insn_bytes
);
6200 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6202 ctx
->hflags
|= MIPS_HFLAG_BC
;
6205 MIPS_INVAL("conditional branch/jump");
6206 generate_exception_end(ctx
, EXCP_RI
);
6211 ctx
->btarget
= btgt
;
6214 if (insn_bytes
== 2) {
6215 ctx
->hflags
|= MIPS_HFLAG_B16
;
6222 /* special3 bitfield operations */
6223 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
6224 int rs
, int lsb
, int msb
)
6226 TCGv t0
= tcg_temp_new();
6227 TCGv t1
= tcg_temp_new();
6229 gen_load_gpr(t1
, rs
);
6232 if (lsb
+ msb
> 31) {
6236 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6238 /* The two checks together imply that lsb == 0,
6239 so this is a simple sign-extension. */
6240 tcg_gen_ext32s_tl(t0
, t1
);
6243 #if defined(TARGET_MIPS64)
6252 if (lsb
+ msb
> 63) {
6255 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6262 gen_load_gpr(t0
, rt
);
6263 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6264 tcg_gen_ext32s_tl(t0
, t0
);
6266 #if defined(TARGET_MIPS64)
6277 gen_load_gpr(t0
, rt
);
6278 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6283 MIPS_INVAL("bitops");
6284 generate_exception_end(ctx
, EXCP_RI
);
6289 gen_store_gpr(t0
, rt
);
6294 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6299 /* If no destination, treat it as a NOP. */
6303 t0
= tcg_temp_new();
6304 gen_load_gpr(t0
, rt
);
6308 TCGv t1
= tcg_temp_new();
6309 TCGv t2
= tcg_const_tl(0x00FF00FF);
6311 tcg_gen_shri_tl(t1
, t0
, 8);
6312 tcg_gen_and_tl(t1
, t1
, t2
);
6313 tcg_gen_and_tl(t0
, t0
, t2
);
6314 tcg_gen_shli_tl(t0
, t0
, 8);
6315 tcg_gen_or_tl(t0
, t0
, t1
);
6318 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6322 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6325 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6327 #if defined(TARGET_MIPS64)
6330 TCGv t1
= tcg_temp_new();
6331 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6333 tcg_gen_shri_tl(t1
, t0
, 8);
6334 tcg_gen_and_tl(t1
, t1
, t2
);
6335 tcg_gen_and_tl(t0
, t0
, t2
);
6336 tcg_gen_shli_tl(t0
, t0
, 8);
6337 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6344 TCGv t1
= tcg_temp_new();
6345 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6347 tcg_gen_shri_tl(t1
, t0
, 16);
6348 tcg_gen_and_tl(t1
, t1
, t2
);
6349 tcg_gen_and_tl(t0
, t0
, t2
);
6350 tcg_gen_shli_tl(t0
, t0
, 16);
6351 tcg_gen_or_tl(t0
, t0
, t1
);
6352 tcg_gen_shri_tl(t1
, t0
, 32);
6353 tcg_gen_shli_tl(t0
, t0
, 32);
6354 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6361 MIPS_INVAL("bsfhl");
6362 generate_exception_end(ctx
, EXCP_RI
);
6369 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6378 t0
= tcg_temp_new();
6379 t1
= tcg_temp_new();
6380 gen_load_gpr(t0
, rs
);
6381 gen_load_gpr(t1
, rt
);
6382 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6383 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6384 if (opc
== OPC_LSA
) {
6385 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6394 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6402 t0
= tcg_temp_new();
6403 if (bits
== 0 || bits
== wordsz
) {
6405 gen_load_gpr(t0
, rt
);
6407 gen_load_gpr(t0
, rs
);
6411 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6413 #if defined(TARGET_MIPS64)
6415 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6420 TCGv t1
= tcg_temp_new();
6421 gen_load_gpr(t0
, rt
);
6422 gen_load_gpr(t1
, rs
);
6426 TCGv_i64 t2
= tcg_temp_new_i64();
6427 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6428 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6429 gen_move_low32(cpu_gpr
[rd
], t2
);
6430 tcg_temp_free_i64(t2
);
6433 #if defined(TARGET_MIPS64)
6435 tcg_gen_shli_tl(t0
, t0
, bits
);
6436 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6437 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6447 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6450 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6453 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6456 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6459 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6466 t0
= tcg_temp_new();
6467 gen_load_gpr(t0
, rt
);
6470 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6472 #if defined(TARGET_MIPS64)
6474 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6481 #ifndef CONFIG_USER_ONLY
6482 /* CP0 (MMU and control) */
6483 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6485 TCGv_i64 t0
= tcg_temp_new_i64();
6486 TCGv_i64 t1
= tcg_temp_new_i64();
6488 tcg_gen_ext_tl_i64(t0
, arg
);
6489 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6490 #if defined(TARGET_MIPS64)
6491 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6493 tcg_gen_concat32_i64(t1
, t1
, t0
);
6495 tcg_gen_st_i64(t1
, cpu_env
, off
);
6496 tcg_temp_free_i64(t1
);
6497 tcg_temp_free_i64(t0
);
6500 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6502 TCGv_i64 t0
= tcg_temp_new_i64();
6503 TCGv_i64 t1
= tcg_temp_new_i64();
6505 tcg_gen_ext_tl_i64(t0
, arg
);
6506 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6507 tcg_gen_concat32_i64(t1
, t1
, t0
);
6508 tcg_gen_st_i64(t1
, cpu_env
, off
);
6509 tcg_temp_free_i64(t1
);
6510 tcg_temp_free_i64(t0
);
6513 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6515 TCGv_i64 t0
= tcg_temp_new_i64();
6517 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6518 #if defined(TARGET_MIPS64)
6519 tcg_gen_shri_i64(t0
, t0
, 30);
6521 tcg_gen_shri_i64(t0
, t0
, 32);
6523 gen_move_low32(arg
, t0
);
6524 tcg_temp_free_i64(t0
);
6527 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6529 TCGv_i64 t0
= tcg_temp_new_i64();
6531 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6532 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6533 gen_move_low32(arg
, t0
);
6534 tcg_temp_free_i64(t0
);
6537 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
6539 TCGv_i32 t0
= tcg_temp_new_i32();
6541 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6542 tcg_gen_ext_i32_tl(arg
, t0
);
6543 tcg_temp_free_i32(t0
);
6546 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
6548 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6549 tcg_gen_ext32s_tl(arg
, arg
);
6552 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
6554 TCGv_i32 t0
= tcg_temp_new_i32();
6556 tcg_gen_trunc_tl_i32(t0
, arg
);
6557 tcg_gen_st_i32(t0
, cpu_env
, off
);
6558 tcg_temp_free_i32(t0
);
6561 #define CP0_CHECK(c) \
6564 goto cp0_unimplemented; \
6568 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6570 const char *rn
= "invalid";
6576 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6577 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6581 goto cp0_unimplemented
;
6587 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6588 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6592 goto cp0_unimplemented
;
6598 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
6599 ctx
->CP0_LLAddr_shift
);
6603 CP0_CHECK(ctx
->mrp
);
6604 gen_helper_mfhc0_maar(arg
, cpu_env
);
6608 goto cp0_unimplemented
;
6617 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6621 goto cp0_unimplemented
;
6625 goto cp0_unimplemented
;
6627 trace_mips_translate_c0("mfhc0", rn
, reg
, sel
);
6631 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6632 tcg_gen_movi_tl(arg
, 0);
6635 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6637 const char *rn
= "invalid";
6638 uint64_t mask
= ctx
->PAMask
>> 36;
6644 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6645 tcg_gen_andi_tl(arg
, arg
, mask
);
6646 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6650 goto cp0_unimplemented
;
6656 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6657 tcg_gen_andi_tl(arg
, arg
, mask
);
6658 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6662 goto cp0_unimplemented
;
6668 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6669 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6670 relevant for modern MIPS cores supporting MTHC0, therefore
6671 treating MTHC0 to LLAddr as NOP. */
6675 CP0_CHECK(ctx
->mrp
);
6676 gen_helper_mthc0_maar(cpu_env
, arg
);
6680 goto cp0_unimplemented
;
6689 tcg_gen_andi_tl(arg
, arg
, mask
);
6690 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6694 goto cp0_unimplemented
;
6698 goto cp0_unimplemented
;
6700 trace_mips_translate_c0("mthc0", rn
, reg
, sel
);
6703 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6706 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6708 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6709 tcg_gen_movi_tl(arg
, 0);
6711 tcg_gen_movi_tl(arg
, ~0);
6715 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6717 const char *rn
= "invalid";
6720 check_insn(ctx
, ISA_MIPS32
);
6726 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6730 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6731 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6735 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6736 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6740 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6741 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6746 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6750 goto cp0_unimplemented
;
6756 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6757 gen_helper_mfc0_random(arg
, cpu_env
);
6761 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6762 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6766 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6767 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6771 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6772 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6776 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6777 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6781 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6782 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6786 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6787 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6788 rn
= "VPEScheFBack";
6791 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6792 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6796 goto cp0_unimplemented
;
6803 TCGv_i64 tmp
= tcg_temp_new_i64();
6804 tcg_gen_ld_i64(tmp
, cpu_env
,
6805 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6806 #if defined(TARGET_MIPS64)
6808 /* Move RI/XI fields to bits 31:30 */
6809 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6810 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6813 gen_move_low32(arg
, tmp
);
6814 tcg_temp_free_i64(tmp
);
6819 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6820 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6824 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6825 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6829 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6830 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6834 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6835 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6839 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6840 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6844 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6845 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6849 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6850 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6854 goto cp0_unimplemented
;
6861 TCGv_i64 tmp
= tcg_temp_new_i64();
6862 tcg_gen_ld_i64(tmp
, cpu_env
,
6863 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6864 #if defined(TARGET_MIPS64)
6866 /* Move RI/XI fields to bits 31:30 */
6867 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6868 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6871 gen_move_low32(arg
, tmp
);
6872 tcg_temp_free_i64(tmp
);
6878 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6879 rn
= "GlobalNumber";
6882 goto cp0_unimplemented
;
6888 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6889 tcg_gen_ext32s_tl(arg
, arg
);
6893 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6894 rn
= "ContextConfig";
6895 goto cp0_unimplemented
;
6897 CP0_CHECK(ctx
->ulri
);
6898 tcg_gen_ld_tl(arg
, cpu_env
,
6899 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6900 tcg_gen_ext32s_tl(arg
, arg
);
6904 goto cp0_unimplemented
;
6910 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6914 check_insn(ctx
, ISA_MIPS32R2
);
6915 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6920 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6921 tcg_gen_ext32s_tl(arg
, arg
);
6926 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6927 tcg_gen_ext32s_tl(arg
, arg
);
6932 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6933 tcg_gen_ext32s_tl(arg
, arg
);
6938 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6943 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6948 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6952 goto cp0_unimplemented
;
6958 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6962 check_insn(ctx
, ISA_MIPS32R2
);
6963 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6967 check_insn(ctx
, ISA_MIPS32R2
);
6968 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6972 check_insn(ctx
, ISA_MIPS32R2
);
6973 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6977 check_insn(ctx
, ISA_MIPS32R2
);
6978 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6982 check_insn(ctx
, ISA_MIPS32R2
);
6983 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6988 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
6992 goto cp0_unimplemented
;
6998 check_insn(ctx
, ISA_MIPS32R2
);
6999 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7003 goto cp0_unimplemented
;
7009 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7010 tcg_gen_ext32s_tl(arg
, arg
);
7015 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7020 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7025 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7026 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7030 goto cp0_unimplemented
;
7036 /* Mark as an IO operation because we read the time. */
7037 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7040 gen_helper_mfc0_count(arg
, cpu_env
);
7041 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7044 /* Break the TB to be able to take timer interrupts immediately
7045 after reading count. DISAS_STOP isn't sufficient, we need to
7046 ensure we break completely out of translated code. */
7047 gen_save_pc(ctx
->base
.pc_next
+ 4);
7048 ctx
->base
.is_jmp
= DISAS_EXIT
;
7051 /* 6,7 are implementation dependent */
7053 goto cp0_unimplemented
;
7059 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7060 tcg_gen_ext32s_tl(arg
, arg
);
7064 goto cp0_unimplemented
;
7070 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7073 /* 6,7 are implementation dependent */
7075 goto cp0_unimplemented
;
7081 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7085 check_insn(ctx
, ISA_MIPS32R2
);
7086 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7090 check_insn(ctx
, ISA_MIPS32R2
);
7091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7095 check_insn(ctx
, ISA_MIPS32R2
);
7096 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7100 goto cp0_unimplemented
;
7106 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7110 goto cp0_unimplemented
;
7116 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7117 tcg_gen_ext32s_tl(arg
, arg
);
7121 goto cp0_unimplemented
;
7127 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7131 check_insn(ctx
, ISA_MIPS32R2
);
7132 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7133 tcg_gen_ext32s_tl(arg
, arg
);
7137 check_insn(ctx
, ISA_MIPS32R2
);
7138 CP0_CHECK(ctx
->cmgcr
);
7139 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7140 tcg_gen_ext32s_tl(arg
, arg
);
7144 goto cp0_unimplemented
;
7150 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7154 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7158 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7162 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7166 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7170 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7173 /* 6,7 are implementation dependent */
7175 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7179 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7183 goto cp0_unimplemented
;
7189 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7193 CP0_CHECK(ctx
->mrp
);
7194 gen_helper_mfc0_maar(arg
, cpu_env
);
7198 CP0_CHECK(ctx
->mrp
);
7199 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7203 goto cp0_unimplemented
;
7216 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7217 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7221 goto cp0_unimplemented
;
7234 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7235 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7239 goto cp0_unimplemented
;
7245 #if defined(TARGET_MIPS64)
7246 check_insn(ctx
, ISA_MIPS3
);
7247 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7248 tcg_gen_ext32s_tl(arg
, arg
);
7253 goto cp0_unimplemented
;
7257 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7258 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7261 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7265 goto cp0_unimplemented
;
7269 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7270 rn
= "'Diagnostic"; /* implementation dependent */
7275 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7279 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7280 rn
= "TraceControl";
7281 goto cp0_unimplemented
;
7283 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7284 rn
= "TraceControl2";
7285 goto cp0_unimplemented
;
7287 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7288 rn
= "UserTraceData";
7289 goto cp0_unimplemented
;
7291 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7293 goto cp0_unimplemented
;
7295 goto cp0_unimplemented
;
7302 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7303 tcg_gen_ext32s_tl(arg
, arg
);
7307 goto cp0_unimplemented
;
7313 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7314 rn
= "Performance0";
7317 // gen_helper_mfc0_performance1(arg);
7318 rn
= "Performance1";
7319 goto cp0_unimplemented
;
7321 // gen_helper_mfc0_performance2(arg);
7322 rn
= "Performance2";
7323 goto cp0_unimplemented
;
7325 // gen_helper_mfc0_performance3(arg);
7326 rn
= "Performance3";
7327 goto cp0_unimplemented
;
7329 // gen_helper_mfc0_performance4(arg);
7330 rn
= "Performance4";
7331 goto cp0_unimplemented
;
7333 // gen_helper_mfc0_performance5(arg);
7334 rn
= "Performance5";
7335 goto cp0_unimplemented
;
7337 // gen_helper_mfc0_performance6(arg);
7338 rn
= "Performance6";
7339 goto cp0_unimplemented
;
7341 // gen_helper_mfc0_performance7(arg);
7342 rn
= "Performance7";
7343 goto cp0_unimplemented
;
7345 goto cp0_unimplemented
;
7351 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7355 goto cp0_unimplemented
;
7364 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7368 goto cp0_unimplemented
;
7378 TCGv_i64 tmp
= tcg_temp_new_i64();
7379 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7380 gen_move_low32(arg
, tmp
);
7381 tcg_temp_free_i64(tmp
);
7389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7393 goto cp0_unimplemented
;
7402 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7409 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7413 goto cp0_unimplemented
;
7419 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7420 tcg_gen_ext32s_tl(arg
, arg
);
7424 goto cp0_unimplemented
;
7431 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7440 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7441 tcg_gen_ld_tl(arg
, cpu_env
,
7442 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7443 tcg_gen_ext32s_tl(arg
, arg
);
7447 goto cp0_unimplemented
;
7451 goto cp0_unimplemented
;
7453 trace_mips_translate_c0("mfc0", rn
, reg
, sel
);
7457 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7458 gen_mfc0_unimplemented(ctx
, arg
);
7461 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7463 const char *rn
= "invalid";
7466 check_insn(ctx
, ISA_MIPS32
);
7468 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7476 gen_helper_mtc0_index(cpu_env
, arg
);
7480 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7481 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7485 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7490 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7500 goto cp0_unimplemented
;
7510 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7511 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7515 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7516 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7520 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7521 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7525 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7526 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7530 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7531 tcg_gen_st_tl(arg
, cpu_env
,
7532 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7536 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7537 tcg_gen_st_tl(arg
, cpu_env
,
7538 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7539 rn
= "VPEScheFBack";
7542 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7543 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7547 goto cp0_unimplemented
;
7553 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7557 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7558 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7562 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7563 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7567 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7568 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7572 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7573 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7577 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7578 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7582 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7583 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7587 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7588 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7592 goto cp0_unimplemented
;
7598 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7604 rn
= "GlobalNumber";
7607 goto cp0_unimplemented
;
7613 gen_helper_mtc0_context(cpu_env
, arg
);
7617 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7618 rn
= "ContextConfig";
7619 goto cp0_unimplemented
;
7621 CP0_CHECK(ctx
->ulri
);
7622 tcg_gen_st_tl(arg
, cpu_env
,
7623 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7627 goto cp0_unimplemented
;
7633 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7637 check_insn(ctx
, ISA_MIPS32R2
);
7638 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7640 ctx
->base
.is_jmp
= DISAS_STOP
;
7644 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7649 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7654 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7659 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7664 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7669 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7673 goto cp0_unimplemented
;
7679 gen_helper_mtc0_wired(cpu_env
, arg
);
7683 check_insn(ctx
, ISA_MIPS32R2
);
7684 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7688 check_insn(ctx
, ISA_MIPS32R2
);
7689 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7693 check_insn(ctx
, ISA_MIPS32R2
);
7694 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7698 check_insn(ctx
, ISA_MIPS32R2
);
7699 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7703 check_insn(ctx
, ISA_MIPS32R2
);
7704 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7709 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7713 goto cp0_unimplemented
;
7719 check_insn(ctx
, ISA_MIPS32R2
);
7720 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7721 ctx
->base
.is_jmp
= DISAS_STOP
;
7725 goto cp0_unimplemented
;
7747 goto cp0_unimplemented
;
7753 gen_helper_mtc0_count(cpu_env
, arg
);
7756 /* 6,7 are implementation dependent */
7758 goto cp0_unimplemented
;
7764 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7768 goto cp0_unimplemented
;
7774 gen_helper_mtc0_compare(cpu_env
, arg
);
7777 /* 6,7 are implementation dependent */
7779 goto cp0_unimplemented
;
7785 save_cpu_state(ctx
, 1);
7786 gen_helper_mtc0_status(cpu_env
, arg
);
7787 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7788 gen_save_pc(ctx
->base
.pc_next
+ 4);
7789 ctx
->base
.is_jmp
= DISAS_EXIT
;
7793 check_insn(ctx
, ISA_MIPS32R2
);
7794 gen_helper_mtc0_intctl(cpu_env
, arg
);
7795 /* Stop translation as we may have switched the execution mode */
7796 ctx
->base
.is_jmp
= DISAS_STOP
;
7800 check_insn(ctx
, ISA_MIPS32R2
);
7801 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7802 /* Stop translation as we may have switched the execution mode */
7803 ctx
->base
.is_jmp
= DISAS_STOP
;
7807 check_insn(ctx
, ISA_MIPS32R2
);
7808 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7809 /* Stop translation as we may have switched the execution mode */
7810 ctx
->base
.is_jmp
= DISAS_STOP
;
7814 goto cp0_unimplemented
;
7820 save_cpu_state(ctx
, 1);
7821 gen_helper_mtc0_cause(cpu_env
, arg
);
7822 /* Stop translation as we may have triggered an interrupt.
7823 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7824 * translated code to check for pending interrupts. */
7825 gen_save_pc(ctx
->base
.pc_next
+ 4);
7826 ctx
->base
.is_jmp
= DISAS_EXIT
;
7830 goto cp0_unimplemented
;
7836 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7840 goto cp0_unimplemented
;
7850 check_insn(ctx
, ISA_MIPS32R2
);
7851 gen_helper_mtc0_ebase(cpu_env
, arg
);
7855 goto cp0_unimplemented
;
7861 gen_helper_mtc0_config0(cpu_env
, arg
);
7863 /* Stop translation as we may have switched the execution mode */
7864 ctx
->base
.is_jmp
= DISAS_STOP
;
7867 /* ignored, read only */
7871 gen_helper_mtc0_config2(cpu_env
, arg
);
7873 /* Stop translation as we may have switched the execution mode */
7874 ctx
->base
.is_jmp
= DISAS_STOP
;
7877 gen_helper_mtc0_config3(cpu_env
, arg
);
7879 /* Stop translation as we may have switched the execution mode */
7880 ctx
->base
.is_jmp
= DISAS_STOP
;
7883 gen_helper_mtc0_config4(cpu_env
, arg
);
7885 ctx
->base
.is_jmp
= DISAS_STOP
;
7888 gen_helper_mtc0_config5(cpu_env
, arg
);
7890 /* Stop translation as we may have switched the execution mode */
7891 ctx
->base
.is_jmp
= DISAS_STOP
;
7893 /* 6,7 are implementation dependent */
7903 rn
= "Invalid config selector";
7904 goto cp0_unimplemented
;
7910 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7914 CP0_CHECK(ctx
->mrp
);
7915 gen_helper_mtc0_maar(cpu_env
, arg
);
7919 CP0_CHECK(ctx
->mrp
);
7920 gen_helper_mtc0_maari(cpu_env
, arg
);
7924 goto cp0_unimplemented
;
7937 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7938 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7942 goto cp0_unimplemented
;
7955 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7956 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7960 goto cp0_unimplemented
;
7966 #if defined(TARGET_MIPS64)
7967 check_insn(ctx
, ISA_MIPS3
);
7968 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7973 goto cp0_unimplemented
;
7977 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7978 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7981 gen_helper_mtc0_framemask(cpu_env
, arg
);
7985 goto cp0_unimplemented
;
7990 rn
= "Diagnostic"; /* implementation dependent */
7995 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7996 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7997 gen_save_pc(ctx
->base
.pc_next
+ 4);
7998 ctx
->base
.is_jmp
= DISAS_EXIT
;
8002 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
8003 rn
= "TraceControl";
8004 /* Stop translation as we may have switched the execution mode */
8005 ctx
->base
.is_jmp
= DISAS_STOP
;
8006 goto cp0_unimplemented
;
8008 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
8009 rn
= "TraceControl2";
8010 /* Stop translation as we may have switched the execution mode */
8011 ctx
->base
.is_jmp
= DISAS_STOP
;
8012 goto cp0_unimplemented
;
8014 /* Stop translation as we may have switched the execution mode */
8015 ctx
->base
.is_jmp
= DISAS_STOP
;
8016 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
8017 rn
= "UserTraceData";
8018 /* Stop translation as we may have switched the execution mode */
8019 ctx
->base
.is_jmp
= DISAS_STOP
;
8020 goto cp0_unimplemented
;
8022 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
8023 /* Stop translation as we may have switched the execution mode */
8024 ctx
->base
.is_jmp
= DISAS_STOP
;
8026 goto cp0_unimplemented
;
8028 goto cp0_unimplemented
;
8035 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8039 goto cp0_unimplemented
;
8045 gen_helper_mtc0_performance0(cpu_env
, arg
);
8046 rn
= "Performance0";
8049 // gen_helper_mtc0_performance1(arg);
8050 rn
= "Performance1";
8051 goto cp0_unimplemented
;
8053 // gen_helper_mtc0_performance2(arg);
8054 rn
= "Performance2";
8055 goto cp0_unimplemented
;
8057 // gen_helper_mtc0_performance3(arg);
8058 rn
= "Performance3";
8059 goto cp0_unimplemented
;
8061 // gen_helper_mtc0_performance4(arg);
8062 rn
= "Performance4";
8063 goto cp0_unimplemented
;
8065 // gen_helper_mtc0_performance5(arg);
8066 rn
= "Performance5";
8067 goto cp0_unimplemented
;
8069 // gen_helper_mtc0_performance6(arg);
8070 rn
= "Performance6";
8071 goto cp0_unimplemented
;
8073 // gen_helper_mtc0_performance7(arg);
8074 rn
= "Performance7";
8075 goto cp0_unimplemented
;
8077 goto cp0_unimplemented
;
8083 gen_helper_mtc0_errctl(cpu_env
, arg
);
8084 ctx
->base
.is_jmp
= DISAS_STOP
;
8088 goto cp0_unimplemented
;
8101 goto cp0_unimplemented
;
8110 gen_helper_mtc0_taglo(cpu_env
, arg
);
8117 gen_helper_mtc0_datalo(cpu_env
, arg
);
8121 goto cp0_unimplemented
;
8130 gen_helper_mtc0_taghi(cpu_env
, arg
);
8137 gen_helper_mtc0_datahi(cpu_env
, arg
);
8142 goto cp0_unimplemented
;
8148 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8152 goto cp0_unimplemented
;
8159 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8168 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8169 tcg_gen_st_tl(arg
, cpu_env
,
8170 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8174 goto cp0_unimplemented
;
8178 goto cp0_unimplemented
;
8180 trace_mips_translate_c0("mtc0", rn
, reg
, sel
);
8182 /* For simplicity assume that all writes can cause interrupts. */
8183 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8185 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
8186 * translated code to check for pending interrupts. */
8187 gen_save_pc(ctx
->base
.pc_next
+ 4);
8188 ctx
->base
.is_jmp
= DISAS_EXIT
;
8193 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8196 #if defined(TARGET_MIPS64)
8197 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8199 const char *rn
= "invalid";
8202 check_insn(ctx
, ISA_MIPS64
);
8208 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8212 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8213 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8217 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8218 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8222 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8223 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8228 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8232 goto cp0_unimplemented
;
8238 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8239 gen_helper_mfc0_random(arg
, cpu_env
);
8243 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8244 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8248 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8249 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8253 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8254 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8258 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8259 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8263 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8264 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8268 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8269 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8270 rn
= "VPEScheFBack";
8273 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8274 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8278 goto cp0_unimplemented
;
8284 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8288 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8289 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8293 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8294 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8298 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8299 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8303 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8304 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8308 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8309 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8313 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8314 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8318 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8319 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8323 goto cp0_unimplemented
;
8329 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8334 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8335 rn
= "GlobalNumber";
8338 goto cp0_unimplemented
;
8344 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8348 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8349 rn
= "ContextConfig";
8350 goto cp0_unimplemented
;
8352 CP0_CHECK(ctx
->ulri
);
8353 tcg_gen_ld_tl(arg
, cpu_env
,
8354 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8358 goto cp0_unimplemented
;
8364 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8368 check_insn(ctx
, ISA_MIPS32R2
);
8369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8374 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8379 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8384 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8389 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8394 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8399 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8403 goto cp0_unimplemented
;
8409 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8413 check_insn(ctx
, ISA_MIPS32R2
);
8414 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8418 check_insn(ctx
, ISA_MIPS32R2
);
8419 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8423 check_insn(ctx
, ISA_MIPS32R2
);
8424 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8428 check_insn(ctx
, ISA_MIPS32R2
);
8429 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8433 check_insn(ctx
, ISA_MIPS32R2
);
8434 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8439 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8443 goto cp0_unimplemented
;
8449 check_insn(ctx
, ISA_MIPS32R2
);
8450 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8454 goto cp0_unimplemented
;
8460 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8465 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8470 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8475 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8476 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8480 goto cp0_unimplemented
;
8486 /* Mark as an IO operation because we read the time. */
8487 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8490 gen_helper_mfc0_count(arg
, cpu_env
);
8491 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8494 /* Break the TB to be able to take timer interrupts immediately
8495 after reading count. DISAS_STOP isn't sufficient, we need to
8496 ensure we break completely out of translated code. */
8497 gen_save_pc(ctx
->base
.pc_next
+ 4);
8498 ctx
->base
.is_jmp
= DISAS_EXIT
;
8501 /* 6,7 are implementation dependent */
8503 goto cp0_unimplemented
;
8509 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8513 goto cp0_unimplemented
;
8519 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8522 /* 6,7 are implementation dependent */
8524 goto cp0_unimplemented
;
8530 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8534 check_insn(ctx
, ISA_MIPS32R2
);
8535 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8539 check_insn(ctx
, ISA_MIPS32R2
);
8540 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8544 check_insn(ctx
, ISA_MIPS32R2
);
8545 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8549 goto cp0_unimplemented
;
8555 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8559 goto cp0_unimplemented
;
8565 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8569 goto cp0_unimplemented
;
8575 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8579 check_insn(ctx
, ISA_MIPS32R2
);
8580 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8584 check_insn(ctx
, ISA_MIPS32R2
);
8585 CP0_CHECK(ctx
->cmgcr
);
8586 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8590 goto cp0_unimplemented
;
8596 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8600 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8608 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8612 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8616 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8619 /* 6,7 are implementation dependent */
8621 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8625 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8629 goto cp0_unimplemented
;
8635 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8639 CP0_CHECK(ctx
->mrp
);
8640 gen_helper_dmfc0_maar(arg
, cpu_env
);
8644 CP0_CHECK(ctx
->mrp
);
8645 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8649 goto cp0_unimplemented
;
8662 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8663 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8667 goto cp0_unimplemented
;
8680 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8681 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8685 goto cp0_unimplemented
;
8691 check_insn(ctx
, ISA_MIPS3
);
8692 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8696 goto cp0_unimplemented
;
8700 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8701 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8704 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8708 goto cp0_unimplemented
;
8712 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8713 rn
= "'Diagnostic"; /* implementation dependent */
8718 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8722 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8723 rn
= "TraceControl";
8724 goto cp0_unimplemented
;
8726 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8727 rn
= "TraceControl2";
8728 goto cp0_unimplemented
;
8730 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8731 rn
= "UserTraceData";
8732 goto cp0_unimplemented
;
8734 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8736 goto cp0_unimplemented
;
8738 goto cp0_unimplemented
;
8745 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8749 goto cp0_unimplemented
;
8755 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8756 rn
= "Performance0";
8759 // gen_helper_dmfc0_performance1(arg);
8760 rn
= "Performance1";
8761 goto cp0_unimplemented
;
8763 // gen_helper_dmfc0_performance2(arg);
8764 rn
= "Performance2";
8765 goto cp0_unimplemented
;
8767 // gen_helper_dmfc0_performance3(arg);
8768 rn
= "Performance3";
8769 goto cp0_unimplemented
;
8771 // gen_helper_dmfc0_performance4(arg);
8772 rn
= "Performance4";
8773 goto cp0_unimplemented
;
8775 // gen_helper_dmfc0_performance5(arg);
8776 rn
= "Performance5";
8777 goto cp0_unimplemented
;
8779 // gen_helper_dmfc0_performance6(arg);
8780 rn
= "Performance6";
8781 goto cp0_unimplemented
;
8783 // gen_helper_dmfc0_performance7(arg);
8784 rn
= "Performance7";
8785 goto cp0_unimplemented
;
8787 goto cp0_unimplemented
;
8793 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8797 goto cp0_unimplemented
;
8807 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8811 goto cp0_unimplemented
;
8820 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8827 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8831 goto cp0_unimplemented
;
8840 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8847 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8851 goto cp0_unimplemented
;
8857 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8861 goto cp0_unimplemented
;
8868 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8877 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8878 tcg_gen_ld_tl(arg
, cpu_env
,
8879 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8883 goto cp0_unimplemented
;
8887 goto cp0_unimplemented
;
8889 trace_mips_translate_c0("dmfc0", rn
, reg
, sel
);
8893 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
8894 gen_mfc0_unimplemented(ctx
, arg
);
8897 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8899 const char *rn
= "invalid";
8902 check_insn(ctx
, ISA_MIPS64
);
8904 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8912 gen_helper_mtc0_index(cpu_env
, arg
);
8916 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8917 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8921 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8926 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8936 goto cp0_unimplemented
;
8946 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8947 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
8951 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8952 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
8956 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8957 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
8961 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8962 gen_helper_mtc0_yqmask(cpu_env
, arg
);
8966 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8967 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8971 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8972 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8973 rn
= "VPEScheFBack";
8976 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8977 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
8981 goto cp0_unimplemented
;
8987 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
8991 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8992 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
8996 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8997 gen_helper_mtc0_tcbind(cpu_env
, arg
);
9001 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9002 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
9006 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9007 gen_helper_mtc0_tchalt(cpu_env
, arg
);
9011 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9012 gen_helper_mtc0_tccontext(cpu_env
, arg
);
9016 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9017 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
9021 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9022 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
9026 goto cp0_unimplemented
;
9032 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
9038 rn
= "GlobalNumber";
9041 goto cp0_unimplemented
;
9047 gen_helper_mtc0_context(cpu_env
, arg
);
9051 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
9052 rn
= "ContextConfig";
9053 goto cp0_unimplemented
;
9055 CP0_CHECK(ctx
->ulri
);
9056 tcg_gen_st_tl(arg
, cpu_env
,
9057 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9061 goto cp0_unimplemented
;
9067 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9071 check_insn(ctx
, ISA_MIPS32R2
);
9072 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9077 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9082 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9087 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9092 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9097 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9102 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9106 goto cp0_unimplemented
;
9112 gen_helper_mtc0_wired(cpu_env
, arg
);
9116 check_insn(ctx
, ISA_MIPS32R2
);
9117 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9121 check_insn(ctx
, ISA_MIPS32R2
);
9122 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9126 check_insn(ctx
, ISA_MIPS32R2
);
9127 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9131 check_insn(ctx
, ISA_MIPS32R2
);
9132 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9136 check_insn(ctx
, ISA_MIPS32R2
);
9137 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9142 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9146 goto cp0_unimplemented
;
9152 check_insn(ctx
, ISA_MIPS32R2
);
9153 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9154 ctx
->base
.is_jmp
= DISAS_STOP
;
9158 goto cp0_unimplemented
;
9180 goto cp0_unimplemented
;
9186 gen_helper_mtc0_count(cpu_env
, arg
);
9189 /* 6,7 are implementation dependent */
9191 goto cp0_unimplemented
;
9193 /* Stop translation as we may have switched the execution mode */
9194 ctx
->base
.is_jmp
= DISAS_STOP
;
9199 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9203 goto cp0_unimplemented
;
9209 gen_helper_mtc0_compare(cpu_env
, arg
);
9212 /* 6,7 are implementation dependent */
9214 goto cp0_unimplemented
;
9216 /* Stop translation as we may have switched the execution mode */
9217 ctx
->base
.is_jmp
= DISAS_STOP
;
9222 save_cpu_state(ctx
, 1);
9223 gen_helper_mtc0_status(cpu_env
, arg
);
9224 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9225 gen_save_pc(ctx
->base
.pc_next
+ 4);
9226 ctx
->base
.is_jmp
= DISAS_EXIT
;
9230 check_insn(ctx
, ISA_MIPS32R2
);
9231 gen_helper_mtc0_intctl(cpu_env
, arg
);
9232 /* Stop translation as we may have switched the execution mode */
9233 ctx
->base
.is_jmp
= DISAS_STOP
;
9237 check_insn(ctx
, ISA_MIPS32R2
);
9238 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9239 /* Stop translation as we may have switched the execution mode */
9240 ctx
->base
.is_jmp
= DISAS_STOP
;
9244 check_insn(ctx
, ISA_MIPS32R2
);
9245 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9246 /* Stop translation as we may have switched the execution mode */
9247 ctx
->base
.is_jmp
= DISAS_STOP
;
9251 goto cp0_unimplemented
;
9257 save_cpu_state(ctx
, 1);
9258 gen_helper_mtc0_cause(cpu_env
, arg
);
9259 /* Stop translation as we may have triggered an interrupt.
9260 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9261 * translated code to check for pending interrupts. */
9262 gen_save_pc(ctx
->base
.pc_next
+ 4);
9263 ctx
->base
.is_jmp
= DISAS_EXIT
;
9267 goto cp0_unimplemented
;
9273 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9277 goto cp0_unimplemented
;
9287 check_insn(ctx
, ISA_MIPS32R2
);
9288 gen_helper_mtc0_ebase(cpu_env
, arg
);
9292 goto cp0_unimplemented
;
9298 gen_helper_mtc0_config0(cpu_env
, arg
);
9300 /* Stop translation as we may have switched the execution mode */
9301 ctx
->base
.is_jmp
= DISAS_STOP
;
9304 /* ignored, read only */
9308 gen_helper_mtc0_config2(cpu_env
, arg
);
9310 /* Stop translation as we may have switched the execution mode */
9311 ctx
->base
.is_jmp
= DISAS_STOP
;
9314 gen_helper_mtc0_config3(cpu_env
, arg
);
9316 /* Stop translation as we may have switched the execution mode */
9317 ctx
->base
.is_jmp
= DISAS_STOP
;
9320 /* currently ignored */
9324 gen_helper_mtc0_config5(cpu_env
, arg
);
9326 /* Stop translation as we may have switched the execution mode */
9327 ctx
->base
.is_jmp
= DISAS_STOP
;
9329 /* 6,7 are implementation dependent */
9331 rn
= "Invalid config selector";
9332 goto cp0_unimplemented
;
9338 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9342 CP0_CHECK(ctx
->mrp
);
9343 gen_helper_mtc0_maar(cpu_env
, arg
);
9347 CP0_CHECK(ctx
->mrp
);
9348 gen_helper_mtc0_maari(cpu_env
, arg
);
9352 goto cp0_unimplemented
;
9365 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9366 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9370 goto cp0_unimplemented
;
9383 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9384 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9388 goto cp0_unimplemented
;
9394 check_insn(ctx
, ISA_MIPS3
);
9395 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9399 goto cp0_unimplemented
;
9403 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9404 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9407 gen_helper_mtc0_framemask(cpu_env
, arg
);
9411 goto cp0_unimplemented
;
9416 rn
= "Diagnostic"; /* implementation dependent */
9421 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9422 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9423 gen_save_pc(ctx
->base
.pc_next
+ 4);
9424 ctx
->base
.is_jmp
= DISAS_EXIT
;
9428 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9429 /* Stop translation as we may have switched the execution mode */
9430 ctx
->base
.is_jmp
= DISAS_STOP
;
9431 rn
= "TraceControl";
9432 goto cp0_unimplemented
;
9434 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9435 /* Stop translation as we may have switched the execution mode */
9436 ctx
->base
.is_jmp
= DISAS_STOP
;
9437 rn
= "TraceControl2";
9438 goto cp0_unimplemented
;
9440 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9441 /* Stop translation as we may have switched the execution mode */
9442 ctx
->base
.is_jmp
= DISAS_STOP
;
9443 rn
= "UserTraceData";
9444 goto cp0_unimplemented
;
9446 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9447 /* Stop translation as we may have switched the execution mode */
9448 ctx
->base
.is_jmp
= DISAS_STOP
;
9450 goto cp0_unimplemented
;
9452 goto cp0_unimplemented
;
9459 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9463 goto cp0_unimplemented
;
9469 gen_helper_mtc0_performance0(cpu_env
, arg
);
9470 rn
= "Performance0";
9473 // gen_helper_mtc0_performance1(cpu_env, arg);
9474 rn
= "Performance1";
9475 goto cp0_unimplemented
;
9477 // gen_helper_mtc0_performance2(cpu_env, arg);
9478 rn
= "Performance2";
9479 goto cp0_unimplemented
;
9481 // gen_helper_mtc0_performance3(cpu_env, arg);
9482 rn
= "Performance3";
9483 goto cp0_unimplemented
;
9485 // gen_helper_mtc0_performance4(cpu_env, arg);
9486 rn
= "Performance4";
9487 goto cp0_unimplemented
;
9489 // gen_helper_mtc0_performance5(cpu_env, arg);
9490 rn
= "Performance5";
9491 goto cp0_unimplemented
;
9493 // gen_helper_mtc0_performance6(cpu_env, arg);
9494 rn
= "Performance6";
9495 goto cp0_unimplemented
;
9497 // gen_helper_mtc0_performance7(cpu_env, arg);
9498 rn
= "Performance7";
9499 goto cp0_unimplemented
;
9501 goto cp0_unimplemented
;
9507 gen_helper_mtc0_errctl(cpu_env
, arg
);
9508 ctx
->base
.is_jmp
= DISAS_STOP
;
9512 goto cp0_unimplemented
;
9525 goto cp0_unimplemented
;
9534 gen_helper_mtc0_taglo(cpu_env
, arg
);
9541 gen_helper_mtc0_datalo(cpu_env
, arg
);
9545 goto cp0_unimplemented
;
9554 gen_helper_mtc0_taghi(cpu_env
, arg
);
9561 gen_helper_mtc0_datahi(cpu_env
, arg
);
9566 goto cp0_unimplemented
;
9572 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9576 goto cp0_unimplemented
;
9583 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9592 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9593 tcg_gen_st_tl(arg
, cpu_env
,
9594 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9598 goto cp0_unimplemented
;
9602 goto cp0_unimplemented
;
9604 trace_mips_translate_c0("dmtc0", rn
, reg
, sel
);
9606 /* For simplicity assume that all writes can cause interrupts. */
9607 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9609 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9610 * translated code to check for pending interrupts. */
9611 gen_save_pc(ctx
->base
.pc_next
+ 4);
9612 ctx
->base
.is_jmp
= DISAS_EXIT
;
9617 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
9619 #endif /* TARGET_MIPS64 */
9621 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9622 int u
, int sel
, int h
)
9624 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9625 TCGv t0
= tcg_temp_local_new();
9627 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9628 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9629 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9630 tcg_gen_movi_tl(t0
, -1);
9631 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9632 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9633 tcg_gen_movi_tl(t0
, -1);
9639 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9642 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9652 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9655 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9658 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9661 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9664 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9667 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9670 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9673 gen_mfc0(ctx
, t0
, rt
, sel
);
9680 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9683 gen_mfc0(ctx
, t0
, rt
, sel
);
9689 gen_helper_mftc0_status(t0
, cpu_env
);
9692 gen_mfc0(ctx
, t0
, rt
, sel
);
9698 gen_helper_mftc0_cause(t0
, cpu_env
);
9708 gen_helper_mftc0_epc(t0
, cpu_env
);
9718 gen_helper_mftc0_ebase(t0
, cpu_env
);
9735 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9745 gen_helper_mftc0_debug(t0
, cpu_env
);
9748 gen_mfc0(ctx
, t0
, rt
, sel
);
9753 gen_mfc0(ctx
, t0
, rt
, sel
);
9755 } else switch (sel
) {
9756 /* GPR registers. */
9758 gen_helper_1e0i(mftgpr
, t0
, rt
);
9760 /* Auxiliary CPU registers */
9764 gen_helper_1e0i(mftlo
, t0
, 0);
9767 gen_helper_1e0i(mfthi
, t0
, 0);
9770 gen_helper_1e0i(mftacx
, t0
, 0);
9773 gen_helper_1e0i(mftlo
, t0
, 1);
9776 gen_helper_1e0i(mfthi
, t0
, 1);
9779 gen_helper_1e0i(mftacx
, t0
, 1);
9782 gen_helper_1e0i(mftlo
, t0
, 2);
9785 gen_helper_1e0i(mfthi
, t0
, 2);
9788 gen_helper_1e0i(mftacx
, t0
, 2);
9791 gen_helper_1e0i(mftlo
, t0
, 3);
9794 gen_helper_1e0i(mfthi
, t0
, 3);
9797 gen_helper_1e0i(mftacx
, t0
, 3);
9800 gen_helper_mftdsp(t0
, cpu_env
);
9806 /* Floating point (COP1). */
9808 /* XXX: For now we support only a single FPU context. */
9810 TCGv_i32 fp0
= tcg_temp_new_i32();
9812 gen_load_fpr32(ctx
, fp0
, rt
);
9813 tcg_gen_ext_i32_tl(t0
, fp0
);
9814 tcg_temp_free_i32(fp0
);
9816 TCGv_i32 fp0
= tcg_temp_new_i32();
9818 gen_load_fpr32h(ctx
, fp0
, rt
);
9819 tcg_gen_ext_i32_tl(t0
, fp0
);
9820 tcg_temp_free_i32(fp0
);
9824 /* XXX: For now we support only a single FPU context. */
9825 gen_helper_1e0i(cfc1
, t0
, rt
);
9827 /* COP2: Not implemented. */
9834 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9835 gen_store_gpr(t0
, rd
);
9841 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9842 generate_exception_end(ctx
, EXCP_RI
);
9845 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9846 int u
, int sel
, int h
)
9848 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9849 TCGv t0
= tcg_temp_local_new();
9851 gen_load_gpr(t0
, rt
);
9852 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9853 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9854 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
9856 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9857 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
9864 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9867 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9877 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9880 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9883 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9886 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9889 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9892 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9895 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9898 gen_mtc0(ctx
, t0
, rd
, sel
);
9905 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9908 gen_mtc0(ctx
, t0
, rd
, sel
);
9914 gen_helper_mttc0_status(cpu_env
, t0
);
9917 gen_mtc0(ctx
, t0
, rd
, sel
);
9923 gen_helper_mttc0_cause(cpu_env
, t0
);
9933 gen_helper_mttc0_ebase(cpu_env
, t0
);
9943 gen_helper_mttc0_debug(cpu_env
, t0
);
9946 gen_mtc0(ctx
, t0
, rd
, sel
);
9951 gen_mtc0(ctx
, t0
, rd
, sel
);
9953 } else switch (sel
) {
9954 /* GPR registers. */
9956 gen_helper_0e1i(mttgpr
, t0
, rd
);
9958 /* Auxiliary CPU registers */
9962 gen_helper_0e1i(mttlo
, t0
, 0);
9965 gen_helper_0e1i(mtthi
, t0
, 0);
9968 gen_helper_0e1i(mttacx
, t0
, 0);
9971 gen_helper_0e1i(mttlo
, t0
, 1);
9974 gen_helper_0e1i(mtthi
, t0
, 1);
9977 gen_helper_0e1i(mttacx
, t0
, 1);
9980 gen_helper_0e1i(mttlo
, t0
, 2);
9983 gen_helper_0e1i(mtthi
, t0
, 2);
9986 gen_helper_0e1i(mttacx
, t0
, 2);
9989 gen_helper_0e1i(mttlo
, t0
, 3);
9992 gen_helper_0e1i(mtthi
, t0
, 3);
9995 gen_helper_0e1i(mttacx
, t0
, 3);
9998 gen_helper_mttdsp(cpu_env
, t0
);
10004 /* Floating point (COP1). */
10006 /* XXX: For now we support only a single FPU context. */
10008 TCGv_i32 fp0
= tcg_temp_new_i32();
10010 tcg_gen_trunc_tl_i32(fp0
, t0
);
10011 gen_store_fpr32(ctx
, fp0
, rd
);
10012 tcg_temp_free_i32(fp0
);
10014 TCGv_i32 fp0
= tcg_temp_new_i32();
10016 tcg_gen_trunc_tl_i32(fp0
, t0
);
10017 gen_store_fpr32h(ctx
, fp0
, rd
);
10018 tcg_temp_free_i32(fp0
);
10022 /* XXX: For now we support only a single FPU context. */
10024 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
10026 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10027 tcg_temp_free_i32(fs_tmp
);
10029 /* Stop translation as we may have changed hflags */
10030 ctx
->base
.is_jmp
= DISAS_STOP
;
10032 /* COP2: Not implemented. */
10039 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
10045 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
10046 generate_exception_end(ctx
, EXCP_RI
);
10049 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
10051 const char *opn
= "ldst";
10053 check_cp0_enabled(ctx
);
10057 /* Treat as NOP. */
10060 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10065 TCGv t0
= tcg_temp_new();
10067 gen_load_gpr(t0
, rt
);
10068 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10073 #if defined(TARGET_MIPS64)
10075 check_insn(ctx
, ISA_MIPS3
);
10077 /* Treat as NOP. */
10080 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10084 check_insn(ctx
, ISA_MIPS3
);
10086 TCGv t0
= tcg_temp_new();
10088 gen_load_gpr(t0
, rt
);
10089 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10098 /* Treat as NOP. */
10101 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10107 TCGv t0
= tcg_temp_new();
10108 gen_load_gpr(t0
, rt
);
10109 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10115 check_cp0_enabled(ctx
);
10117 /* Treat as NOP. */
10120 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10121 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10125 check_cp0_enabled(ctx
);
10126 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10127 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10132 if (!env
->tlb
->helper_tlbwi
)
10134 gen_helper_tlbwi(cpu_env
);
10138 if (ctx
->ie
>= 2) {
10139 if (!env
->tlb
->helper_tlbinv
) {
10142 gen_helper_tlbinv(cpu_env
);
10143 } /* treat as nop if TLBINV not supported */
10147 if (ctx
->ie
>= 2) {
10148 if (!env
->tlb
->helper_tlbinvf
) {
10151 gen_helper_tlbinvf(cpu_env
);
10152 } /* treat as nop if TLBINV not supported */
10156 if (!env
->tlb
->helper_tlbwr
)
10158 gen_helper_tlbwr(cpu_env
);
10162 if (!env
->tlb
->helper_tlbp
)
10164 gen_helper_tlbp(cpu_env
);
10168 if (!env
->tlb
->helper_tlbr
)
10170 gen_helper_tlbr(cpu_env
);
10172 case OPC_ERET
: /* OPC_ERETNC */
10173 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10174 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10177 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10178 if (ctx
->opcode
& (1 << bit_shift
)) {
10181 check_insn(ctx
, ISA_MIPS32R5
);
10182 gen_helper_eretnc(cpu_env
);
10186 check_insn(ctx
, ISA_MIPS2
);
10187 gen_helper_eret(cpu_env
);
10189 ctx
->base
.is_jmp
= DISAS_EXIT
;
10194 check_insn(ctx
, ISA_MIPS32
);
10195 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10196 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10199 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10201 generate_exception_end(ctx
, EXCP_RI
);
10203 gen_helper_deret(cpu_env
);
10204 ctx
->base
.is_jmp
= DISAS_EXIT
;
10209 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10210 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10211 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10214 /* If we get an exception, we want to restart at next instruction */
10215 ctx
->base
.pc_next
+= 4;
10216 save_cpu_state(ctx
, 1);
10217 ctx
->base
.pc_next
-= 4;
10218 gen_helper_wait(cpu_env
);
10219 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10224 generate_exception_end(ctx
, EXCP_RI
);
10227 (void)opn
; /* avoid a compiler warning */
10229 #endif /* !CONFIG_USER_ONLY */
10231 /* CP1 Branches (before delay slot) */
10232 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
10233 int32_t cc
, int32_t offset
)
10235 target_ulong btarget
;
10236 TCGv_i32 t0
= tcg_temp_new_i32();
10238 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10239 generate_exception_end(ctx
, EXCP_RI
);
10244 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
10246 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
10250 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10251 tcg_gen_not_i32(t0
, t0
);
10252 tcg_gen_andi_i32(t0
, t0
, 1);
10253 tcg_gen_extu_i32_tl(bcond
, t0
);
10256 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10257 tcg_gen_not_i32(t0
, t0
);
10258 tcg_gen_andi_i32(t0
, t0
, 1);
10259 tcg_gen_extu_i32_tl(bcond
, t0
);
10262 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10263 tcg_gen_andi_i32(t0
, t0
, 1);
10264 tcg_gen_extu_i32_tl(bcond
, t0
);
10267 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10268 tcg_gen_andi_i32(t0
, t0
, 1);
10269 tcg_gen_extu_i32_tl(bcond
, t0
);
10271 ctx
->hflags
|= MIPS_HFLAG_BL
;
10275 TCGv_i32 t1
= tcg_temp_new_i32();
10276 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10277 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10278 tcg_gen_nand_i32(t0
, t0
, t1
);
10279 tcg_temp_free_i32(t1
);
10280 tcg_gen_andi_i32(t0
, t0
, 1);
10281 tcg_gen_extu_i32_tl(bcond
, t0
);
10286 TCGv_i32 t1
= tcg_temp_new_i32();
10287 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10288 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10289 tcg_gen_or_i32(t0
, t0
, t1
);
10290 tcg_temp_free_i32(t1
);
10291 tcg_gen_andi_i32(t0
, t0
, 1);
10292 tcg_gen_extu_i32_tl(bcond
, t0
);
10297 TCGv_i32 t1
= tcg_temp_new_i32();
10298 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10299 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10300 tcg_gen_and_i32(t0
, t0
, t1
);
10301 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10302 tcg_gen_and_i32(t0
, t0
, t1
);
10303 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10304 tcg_gen_nand_i32(t0
, t0
, t1
);
10305 tcg_temp_free_i32(t1
);
10306 tcg_gen_andi_i32(t0
, t0
, 1);
10307 tcg_gen_extu_i32_tl(bcond
, t0
);
10312 TCGv_i32 t1
= tcg_temp_new_i32();
10313 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10314 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10315 tcg_gen_or_i32(t0
, t0
, t1
);
10316 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10317 tcg_gen_or_i32(t0
, t0
, t1
);
10318 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10319 tcg_gen_or_i32(t0
, t0
, t1
);
10320 tcg_temp_free_i32(t1
);
10321 tcg_gen_andi_i32(t0
, t0
, 1);
10322 tcg_gen_extu_i32_tl(bcond
, t0
);
10325 ctx
->hflags
|= MIPS_HFLAG_BC
;
10328 MIPS_INVAL("cp1 cond branch");
10329 generate_exception_end(ctx
, EXCP_RI
);
10332 ctx
->btarget
= btarget
;
10333 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10335 tcg_temp_free_i32(t0
);
10338 /* R6 CP1 Branches */
10339 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10340 int32_t ft
, int32_t offset
,
10341 int delayslot_size
)
10343 target_ulong btarget
;
10344 TCGv_i64 t0
= tcg_temp_new_i64();
10346 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10347 #ifdef MIPS_DEBUG_DISAS
10348 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10349 "\n", ctx
->base
.pc_next
);
10351 generate_exception_end(ctx
, EXCP_RI
);
10355 gen_load_fpr64(ctx
, t0
, ft
);
10356 tcg_gen_andi_i64(t0
, t0
, 1);
10358 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10362 tcg_gen_xori_i64(t0
, t0
, 1);
10363 ctx
->hflags
|= MIPS_HFLAG_BC
;
10366 /* t0 already set */
10367 ctx
->hflags
|= MIPS_HFLAG_BC
;
10370 MIPS_INVAL("cp1 cond branch");
10371 generate_exception_end(ctx
, EXCP_RI
);
10375 tcg_gen_trunc_i64_tl(bcond
, t0
);
10377 ctx
->btarget
= btarget
;
10379 switch (delayslot_size
) {
10381 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10384 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10389 tcg_temp_free_i64(t0
);
10392 /* Coprocessor 1 (FPU) */
10394 #define FOP(func, fmt) (((fmt) << 21) | (func))
10397 OPC_ADD_S
= FOP(0, FMT_S
),
10398 OPC_SUB_S
= FOP(1, FMT_S
),
10399 OPC_MUL_S
= FOP(2, FMT_S
),
10400 OPC_DIV_S
= FOP(3, FMT_S
),
10401 OPC_SQRT_S
= FOP(4, FMT_S
),
10402 OPC_ABS_S
= FOP(5, FMT_S
),
10403 OPC_MOV_S
= FOP(6, FMT_S
),
10404 OPC_NEG_S
= FOP(7, FMT_S
),
10405 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10406 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10407 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10408 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10409 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10410 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10411 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10412 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10413 OPC_SEL_S
= FOP(16, FMT_S
),
10414 OPC_MOVCF_S
= FOP(17, FMT_S
),
10415 OPC_MOVZ_S
= FOP(18, FMT_S
),
10416 OPC_MOVN_S
= FOP(19, FMT_S
),
10417 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10418 OPC_RECIP_S
= FOP(21, FMT_S
),
10419 OPC_RSQRT_S
= FOP(22, FMT_S
),
10420 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10421 OPC_MADDF_S
= FOP(24, FMT_S
),
10422 OPC_MSUBF_S
= FOP(25, FMT_S
),
10423 OPC_RINT_S
= FOP(26, FMT_S
),
10424 OPC_CLASS_S
= FOP(27, FMT_S
),
10425 OPC_MIN_S
= FOP(28, FMT_S
),
10426 OPC_RECIP2_S
= FOP(28, FMT_S
),
10427 OPC_MINA_S
= FOP(29, FMT_S
),
10428 OPC_RECIP1_S
= FOP(29, FMT_S
),
10429 OPC_MAX_S
= FOP(30, FMT_S
),
10430 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10431 OPC_MAXA_S
= FOP(31, FMT_S
),
10432 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10433 OPC_CVT_D_S
= FOP(33, FMT_S
),
10434 OPC_CVT_W_S
= FOP(36, FMT_S
),
10435 OPC_CVT_L_S
= FOP(37, FMT_S
),
10436 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10437 OPC_CMP_F_S
= FOP (48, FMT_S
),
10438 OPC_CMP_UN_S
= FOP (49, FMT_S
),
10439 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
10440 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
10441 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
10442 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
10443 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
10444 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
10445 OPC_CMP_SF_S
= FOP (56, FMT_S
),
10446 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
10447 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
10448 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
10449 OPC_CMP_LT_S
= FOP (60, FMT_S
),
10450 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
10451 OPC_CMP_LE_S
= FOP (62, FMT_S
),
10452 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
10454 OPC_ADD_D
= FOP(0, FMT_D
),
10455 OPC_SUB_D
= FOP(1, FMT_D
),
10456 OPC_MUL_D
= FOP(2, FMT_D
),
10457 OPC_DIV_D
= FOP(3, FMT_D
),
10458 OPC_SQRT_D
= FOP(4, FMT_D
),
10459 OPC_ABS_D
= FOP(5, FMT_D
),
10460 OPC_MOV_D
= FOP(6, FMT_D
),
10461 OPC_NEG_D
= FOP(7, FMT_D
),
10462 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10463 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10464 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10465 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10466 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10467 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10468 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10469 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10470 OPC_SEL_D
= FOP(16, FMT_D
),
10471 OPC_MOVCF_D
= FOP(17, FMT_D
),
10472 OPC_MOVZ_D
= FOP(18, FMT_D
),
10473 OPC_MOVN_D
= FOP(19, FMT_D
),
10474 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10475 OPC_RECIP_D
= FOP(21, FMT_D
),
10476 OPC_RSQRT_D
= FOP(22, FMT_D
),
10477 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10478 OPC_MADDF_D
= FOP(24, FMT_D
),
10479 OPC_MSUBF_D
= FOP(25, FMT_D
),
10480 OPC_RINT_D
= FOP(26, FMT_D
),
10481 OPC_CLASS_D
= FOP(27, FMT_D
),
10482 OPC_MIN_D
= FOP(28, FMT_D
),
10483 OPC_RECIP2_D
= FOP(28, FMT_D
),
10484 OPC_MINA_D
= FOP(29, FMT_D
),
10485 OPC_RECIP1_D
= FOP(29, FMT_D
),
10486 OPC_MAX_D
= FOP(30, FMT_D
),
10487 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10488 OPC_MAXA_D
= FOP(31, FMT_D
),
10489 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10490 OPC_CVT_S_D
= FOP(32, FMT_D
),
10491 OPC_CVT_W_D
= FOP(36, FMT_D
),
10492 OPC_CVT_L_D
= FOP(37, FMT_D
),
10493 OPC_CMP_F_D
= FOP (48, FMT_D
),
10494 OPC_CMP_UN_D
= FOP (49, FMT_D
),
10495 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
10496 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
10497 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
10498 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
10499 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
10500 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
10501 OPC_CMP_SF_D
= FOP (56, FMT_D
),
10502 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
10503 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
10504 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
10505 OPC_CMP_LT_D
= FOP (60, FMT_D
),
10506 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
10507 OPC_CMP_LE_D
= FOP (62, FMT_D
),
10508 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
10510 OPC_CVT_S_W
= FOP(32, FMT_W
),
10511 OPC_CVT_D_W
= FOP(33, FMT_W
),
10512 OPC_CVT_S_L
= FOP(32, FMT_L
),
10513 OPC_CVT_D_L
= FOP(33, FMT_L
),
10514 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10516 OPC_ADD_PS
= FOP(0, FMT_PS
),
10517 OPC_SUB_PS
= FOP(1, FMT_PS
),
10518 OPC_MUL_PS
= FOP(2, FMT_PS
),
10519 OPC_DIV_PS
= FOP(3, FMT_PS
),
10520 OPC_ABS_PS
= FOP(5, FMT_PS
),
10521 OPC_MOV_PS
= FOP(6, FMT_PS
),
10522 OPC_NEG_PS
= FOP(7, FMT_PS
),
10523 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10524 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10525 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10526 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10527 OPC_MULR_PS
= FOP(26, FMT_PS
),
10528 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10529 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10530 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10531 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10533 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10534 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10535 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10536 OPC_PLL_PS
= FOP(44, FMT_PS
),
10537 OPC_PLU_PS
= FOP(45, FMT_PS
),
10538 OPC_PUL_PS
= FOP(46, FMT_PS
),
10539 OPC_PUU_PS
= FOP(47, FMT_PS
),
10540 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
10541 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
10542 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
10543 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
10544 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
10545 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
10546 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
10547 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
10548 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
10549 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
10550 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
10551 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
10552 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
10553 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
10554 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
10555 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
10559 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10560 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10561 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10562 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10563 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10564 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10565 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10566 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10567 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10568 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10569 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10570 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10571 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10572 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10573 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10574 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10575 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10576 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10577 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10578 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10579 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10580 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10582 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10583 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10584 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10585 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10586 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10587 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10588 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10589 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10590 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10591 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10592 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10593 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10594 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10595 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10596 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10597 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10598 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10599 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10600 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10601 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10602 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10603 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
10605 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10607 TCGv t0
= tcg_temp_new();
10612 TCGv_i32 fp0
= tcg_temp_new_i32();
10614 gen_load_fpr32(ctx
, fp0
, fs
);
10615 tcg_gen_ext_i32_tl(t0
, fp0
);
10616 tcg_temp_free_i32(fp0
);
10618 gen_store_gpr(t0
, rt
);
10621 gen_load_gpr(t0
, rt
);
10623 TCGv_i32 fp0
= tcg_temp_new_i32();
10625 tcg_gen_trunc_tl_i32(fp0
, t0
);
10626 gen_store_fpr32(ctx
, fp0
, fs
);
10627 tcg_temp_free_i32(fp0
);
10631 gen_helper_1e0i(cfc1
, t0
, fs
);
10632 gen_store_gpr(t0
, rt
);
10635 gen_load_gpr(t0
, rt
);
10636 save_cpu_state(ctx
, 0);
10638 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10640 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10641 tcg_temp_free_i32(fs_tmp
);
10643 /* Stop translation as we may have changed hflags */
10644 ctx
->base
.is_jmp
= DISAS_STOP
;
10646 #if defined(TARGET_MIPS64)
10648 gen_load_fpr64(ctx
, t0
, fs
);
10649 gen_store_gpr(t0
, rt
);
10652 gen_load_gpr(t0
, rt
);
10653 gen_store_fpr64(ctx
, t0
, fs
);
10658 TCGv_i32 fp0
= tcg_temp_new_i32();
10660 gen_load_fpr32h(ctx
, fp0
, fs
);
10661 tcg_gen_ext_i32_tl(t0
, fp0
);
10662 tcg_temp_free_i32(fp0
);
10664 gen_store_gpr(t0
, rt
);
10667 gen_load_gpr(t0
, rt
);
10669 TCGv_i32 fp0
= tcg_temp_new_i32();
10671 tcg_gen_trunc_tl_i32(fp0
, t0
);
10672 gen_store_fpr32h(ctx
, fp0
, fs
);
10673 tcg_temp_free_i32(fp0
);
10677 MIPS_INVAL("cp1 move");
10678 generate_exception_end(ctx
, EXCP_RI
);
10686 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10693 /* Treat as NOP. */
10698 cond
= TCG_COND_EQ
;
10700 cond
= TCG_COND_NE
;
10702 l1
= gen_new_label();
10703 t0
= tcg_temp_new_i32();
10704 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10705 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10706 tcg_temp_free_i32(t0
);
10708 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10710 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10715 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10719 TCGv_i32 t0
= tcg_temp_new_i32();
10720 TCGLabel
*l1
= gen_new_label();
10723 cond
= TCG_COND_EQ
;
10725 cond
= TCG_COND_NE
;
10727 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10728 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10729 gen_load_fpr32(ctx
, t0
, fs
);
10730 gen_store_fpr32(ctx
, t0
, fd
);
10732 tcg_temp_free_i32(t0
);
10735 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
10738 TCGv_i32 t0
= tcg_temp_new_i32();
10740 TCGLabel
*l1
= gen_new_label();
10743 cond
= TCG_COND_EQ
;
10745 cond
= TCG_COND_NE
;
10747 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10748 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10749 tcg_temp_free_i32(t0
);
10750 fp0
= tcg_temp_new_i64();
10751 gen_load_fpr64(ctx
, fp0
, fs
);
10752 gen_store_fpr64(ctx
, fp0
, fd
);
10753 tcg_temp_free_i64(fp0
);
10757 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10761 TCGv_i32 t0
= tcg_temp_new_i32();
10762 TCGLabel
*l1
= gen_new_label();
10763 TCGLabel
*l2
= gen_new_label();
10766 cond
= TCG_COND_EQ
;
10768 cond
= TCG_COND_NE
;
10770 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10771 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10772 gen_load_fpr32(ctx
, t0
, fs
);
10773 gen_store_fpr32(ctx
, t0
, fd
);
10776 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10777 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10778 gen_load_fpr32h(ctx
, t0
, fs
);
10779 gen_store_fpr32h(ctx
, t0
, fd
);
10780 tcg_temp_free_i32(t0
);
10784 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10787 TCGv_i32 t1
= tcg_const_i32(0);
10788 TCGv_i32 fp0
= tcg_temp_new_i32();
10789 TCGv_i32 fp1
= tcg_temp_new_i32();
10790 TCGv_i32 fp2
= tcg_temp_new_i32();
10791 gen_load_fpr32(ctx
, fp0
, fd
);
10792 gen_load_fpr32(ctx
, fp1
, ft
);
10793 gen_load_fpr32(ctx
, fp2
, fs
);
10797 tcg_gen_andi_i32(fp0
, fp0
, 1);
10798 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10801 tcg_gen_andi_i32(fp1
, fp1
, 1);
10802 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10805 tcg_gen_andi_i32(fp1
, fp1
, 1);
10806 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10809 MIPS_INVAL("gen_sel_s");
10810 generate_exception_end(ctx
, EXCP_RI
);
10814 gen_store_fpr32(ctx
, fp0
, fd
);
10815 tcg_temp_free_i32(fp2
);
10816 tcg_temp_free_i32(fp1
);
10817 tcg_temp_free_i32(fp0
);
10818 tcg_temp_free_i32(t1
);
10821 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10824 TCGv_i64 t1
= tcg_const_i64(0);
10825 TCGv_i64 fp0
= tcg_temp_new_i64();
10826 TCGv_i64 fp1
= tcg_temp_new_i64();
10827 TCGv_i64 fp2
= tcg_temp_new_i64();
10828 gen_load_fpr64(ctx
, fp0
, fd
);
10829 gen_load_fpr64(ctx
, fp1
, ft
);
10830 gen_load_fpr64(ctx
, fp2
, fs
);
10834 tcg_gen_andi_i64(fp0
, fp0
, 1);
10835 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10838 tcg_gen_andi_i64(fp1
, fp1
, 1);
10839 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10842 tcg_gen_andi_i64(fp1
, fp1
, 1);
10843 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10846 MIPS_INVAL("gen_sel_d");
10847 generate_exception_end(ctx
, EXCP_RI
);
10851 gen_store_fpr64(ctx
, fp0
, fd
);
10852 tcg_temp_free_i64(fp2
);
10853 tcg_temp_free_i64(fp1
);
10854 tcg_temp_free_i64(fp0
);
10855 tcg_temp_free_i64(t1
);
10858 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
10859 int ft
, int fs
, int fd
, int cc
)
10861 uint32_t func
= ctx
->opcode
& 0x3f;
10865 TCGv_i32 fp0
= tcg_temp_new_i32();
10866 TCGv_i32 fp1
= tcg_temp_new_i32();
10868 gen_load_fpr32(ctx
, fp0
, fs
);
10869 gen_load_fpr32(ctx
, fp1
, ft
);
10870 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10871 tcg_temp_free_i32(fp1
);
10872 gen_store_fpr32(ctx
, fp0
, fd
);
10873 tcg_temp_free_i32(fp0
);
10878 TCGv_i32 fp0
= tcg_temp_new_i32();
10879 TCGv_i32 fp1
= tcg_temp_new_i32();
10881 gen_load_fpr32(ctx
, fp0
, fs
);
10882 gen_load_fpr32(ctx
, fp1
, ft
);
10883 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10884 tcg_temp_free_i32(fp1
);
10885 gen_store_fpr32(ctx
, fp0
, fd
);
10886 tcg_temp_free_i32(fp0
);
10891 TCGv_i32 fp0
= tcg_temp_new_i32();
10892 TCGv_i32 fp1
= tcg_temp_new_i32();
10894 gen_load_fpr32(ctx
, fp0
, fs
);
10895 gen_load_fpr32(ctx
, fp1
, ft
);
10896 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10897 tcg_temp_free_i32(fp1
);
10898 gen_store_fpr32(ctx
, fp0
, fd
);
10899 tcg_temp_free_i32(fp0
);
10904 TCGv_i32 fp0
= tcg_temp_new_i32();
10905 TCGv_i32 fp1
= tcg_temp_new_i32();
10907 gen_load_fpr32(ctx
, fp0
, fs
);
10908 gen_load_fpr32(ctx
, fp1
, ft
);
10909 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
10910 tcg_temp_free_i32(fp1
);
10911 gen_store_fpr32(ctx
, fp0
, fd
);
10912 tcg_temp_free_i32(fp0
);
10917 TCGv_i32 fp0
= tcg_temp_new_i32();
10919 gen_load_fpr32(ctx
, fp0
, fs
);
10920 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
10921 gen_store_fpr32(ctx
, fp0
, fd
);
10922 tcg_temp_free_i32(fp0
);
10927 TCGv_i32 fp0
= tcg_temp_new_i32();
10929 gen_load_fpr32(ctx
, fp0
, fs
);
10930 if (ctx
->abs2008
) {
10931 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
10933 gen_helper_float_abs_s(fp0
, fp0
);
10935 gen_store_fpr32(ctx
, fp0
, fd
);
10936 tcg_temp_free_i32(fp0
);
10941 TCGv_i32 fp0
= tcg_temp_new_i32();
10943 gen_load_fpr32(ctx
, fp0
, fs
);
10944 gen_store_fpr32(ctx
, fp0
, fd
);
10945 tcg_temp_free_i32(fp0
);
10950 TCGv_i32 fp0
= tcg_temp_new_i32();
10952 gen_load_fpr32(ctx
, fp0
, fs
);
10953 if (ctx
->abs2008
) {
10954 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
10956 gen_helper_float_chs_s(fp0
, fp0
);
10958 gen_store_fpr32(ctx
, fp0
, fd
);
10959 tcg_temp_free_i32(fp0
);
10962 case OPC_ROUND_L_S
:
10963 check_cp1_64bitmode(ctx
);
10965 TCGv_i32 fp32
= tcg_temp_new_i32();
10966 TCGv_i64 fp64
= tcg_temp_new_i64();
10968 gen_load_fpr32(ctx
, fp32
, fs
);
10969 if (ctx
->nan2008
) {
10970 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10972 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10974 tcg_temp_free_i32(fp32
);
10975 gen_store_fpr64(ctx
, fp64
, fd
);
10976 tcg_temp_free_i64(fp64
);
10979 case OPC_TRUNC_L_S
:
10980 check_cp1_64bitmode(ctx
);
10982 TCGv_i32 fp32
= tcg_temp_new_i32();
10983 TCGv_i64 fp64
= tcg_temp_new_i64();
10985 gen_load_fpr32(ctx
, fp32
, fs
);
10986 if (ctx
->nan2008
) {
10987 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10989 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10991 tcg_temp_free_i32(fp32
);
10992 gen_store_fpr64(ctx
, fp64
, fd
);
10993 tcg_temp_free_i64(fp64
);
10997 check_cp1_64bitmode(ctx
);
10999 TCGv_i32 fp32
= tcg_temp_new_i32();
11000 TCGv_i64 fp64
= tcg_temp_new_i64();
11002 gen_load_fpr32(ctx
, fp32
, fs
);
11003 if (ctx
->nan2008
) {
11004 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
11006 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
11008 tcg_temp_free_i32(fp32
);
11009 gen_store_fpr64(ctx
, fp64
, fd
);
11010 tcg_temp_free_i64(fp64
);
11013 case OPC_FLOOR_L_S
:
11014 check_cp1_64bitmode(ctx
);
11016 TCGv_i32 fp32
= tcg_temp_new_i32();
11017 TCGv_i64 fp64
= tcg_temp_new_i64();
11019 gen_load_fpr32(ctx
, fp32
, fs
);
11020 if (ctx
->nan2008
) {
11021 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
11023 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
11025 tcg_temp_free_i32(fp32
);
11026 gen_store_fpr64(ctx
, fp64
, fd
);
11027 tcg_temp_free_i64(fp64
);
11030 case OPC_ROUND_W_S
:
11032 TCGv_i32 fp0
= tcg_temp_new_i32();
11034 gen_load_fpr32(ctx
, fp0
, fs
);
11035 if (ctx
->nan2008
) {
11036 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
11038 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
11040 gen_store_fpr32(ctx
, fp0
, fd
);
11041 tcg_temp_free_i32(fp0
);
11044 case OPC_TRUNC_W_S
:
11046 TCGv_i32 fp0
= tcg_temp_new_i32();
11048 gen_load_fpr32(ctx
, fp0
, fs
);
11049 if (ctx
->nan2008
) {
11050 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
11052 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11054 gen_store_fpr32(ctx
, fp0
, fd
);
11055 tcg_temp_free_i32(fp0
);
11060 TCGv_i32 fp0
= tcg_temp_new_i32();
11062 gen_load_fpr32(ctx
, fp0
, fs
);
11063 if (ctx
->nan2008
) {
11064 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11066 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11068 gen_store_fpr32(ctx
, fp0
, fd
);
11069 tcg_temp_free_i32(fp0
);
11072 case OPC_FLOOR_W_S
:
11074 TCGv_i32 fp0
= tcg_temp_new_i32();
11076 gen_load_fpr32(ctx
, fp0
, fs
);
11077 if (ctx
->nan2008
) {
11078 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11080 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11082 gen_store_fpr32(ctx
, fp0
, fd
);
11083 tcg_temp_free_i32(fp0
);
11087 check_insn(ctx
, ISA_MIPS32R6
);
11088 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11091 check_insn(ctx
, ISA_MIPS32R6
);
11092 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11095 check_insn(ctx
, ISA_MIPS32R6
);
11096 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11099 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11100 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11103 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11105 TCGLabel
*l1
= gen_new_label();
11109 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11111 fp0
= tcg_temp_new_i32();
11112 gen_load_fpr32(ctx
, fp0
, fs
);
11113 gen_store_fpr32(ctx
, fp0
, fd
);
11114 tcg_temp_free_i32(fp0
);
11119 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11121 TCGLabel
*l1
= gen_new_label();
11125 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11126 fp0
= tcg_temp_new_i32();
11127 gen_load_fpr32(ctx
, fp0
, fs
);
11128 gen_store_fpr32(ctx
, fp0
, fd
);
11129 tcg_temp_free_i32(fp0
);
11136 TCGv_i32 fp0
= tcg_temp_new_i32();
11138 gen_load_fpr32(ctx
, fp0
, fs
);
11139 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11140 gen_store_fpr32(ctx
, fp0
, fd
);
11141 tcg_temp_free_i32(fp0
);
11146 TCGv_i32 fp0
= tcg_temp_new_i32();
11148 gen_load_fpr32(ctx
, fp0
, fs
);
11149 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11150 gen_store_fpr32(ctx
, fp0
, fd
);
11151 tcg_temp_free_i32(fp0
);
11155 check_insn(ctx
, ISA_MIPS32R6
);
11157 TCGv_i32 fp0
= tcg_temp_new_i32();
11158 TCGv_i32 fp1
= tcg_temp_new_i32();
11159 TCGv_i32 fp2
= tcg_temp_new_i32();
11160 gen_load_fpr32(ctx
, fp0
, fs
);
11161 gen_load_fpr32(ctx
, fp1
, ft
);
11162 gen_load_fpr32(ctx
, fp2
, fd
);
11163 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11164 gen_store_fpr32(ctx
, fp2
, fd
);
11165 tcg_temp_free_i32(fp2
);
11166 tcg_temp_free_i32(fp1
);
11167 tcg_temp_free_i32(fp0
);
11171 check_insn(ctx
, ISA_MIPS32R6
);
11173 TCGv_i32 fp0
= tcg_temp_new_i32();
11174 TCGv_i32 fp1
= tcg_temp_new_i32();
11175 TCGv_i32 fp2
= tcg_temp_new_i32();
11176 gen_load_fpr32(ctx
, fp0
, fs
);
11177 gen_load_fpr32(ctx
, fp1
, ft
);
11178 gen_load_fpr32(ctx
, fp2
, fd
);
11179 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11180 gen_store_fpr32(ctx
, fp2
, fd
);
11181 tcg_temp_free_i32(fp2
);
11182 tcg_temp_free_i32(fp1
);
11183 tcg_temp_free_i32(fp0
);
11187 check_insn(ctx
, ISA_MIPS32R6
);
11189 TCGv_i32 fp0
= tcg_temp_new_i32();
11190 gen_load_fpr32(ctx
, fp0
, fs
);
11191 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11192 gen_store_fpr32(ctx
, fp0
, fd
);
11193 tcg_temp_free_i32(fp0
);
11197 check_insn(ctx
, ISA_MIPS32R6
);
11199 TCGv_i32 fp0
= tcg_temp_new_i32();
11200 gen_load_fpr32(ctx
, fp0
, fs
);
11201 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11202 gen_store_fpr32(ctx
, fp0
, fd
);
11203 tcg_temp_free_i32(fp0
);
11206 case OPC_MIN_S
: /* OPC_RECIP2_S */
11207 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11209 TCGv_i32 fp0
= tcg_temp_new_i32();
11210 TCGv_i32 fp1
= tcg_temp_new_i32();
11211 TCGv_i32 fp2
= tcg_temp_new_i32();
11212 gen_load_fpr32(ctx
, fp0
, fs
);
11213 gen_load_fpr32(ctx
, fp1
, ft
);
11214 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11215 gen_store_fpr32(ctx
, fp2
, fd
);
11216 tcg_temp_free_i32(fp2
);
11217 tcg_temp_free_i32(fp1
);
11218 tcg_temp_free_i32(fp0
);
11221 check_cp1_64bitmode(ctx
);
11223 TCGv_i32 fp0
= tcg_temp_new_i32();
11224 TCGv_i32 fp1
= tcg_temp_new_i32();
11226 gen_load_fpr32(ctx
, fp0
, fs
);
11227 gen_load_fpr32(ctx
, fp1
, ft
);
11228 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11229 tcg_temp_free_i32(fp1
);
11230 gen_store_fpr32(ctx
, fp0
, fd
);
11231 tcg_temp_free_i32(fp0
);
11235 case OPC_MINA_S
: /* OPC_RECIP1_S */
11236 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11238 TCGv_i32 fp0
= tcg_temp_new_i32();
11239 TCGv_i32 fp1
= tcg_temp_new_i32();
11240 TCGv_i32 fp2
= tcg_temp_new_i32();
11241 gen_load_fpr32(ctx
, fp0
, fs
);
11242 gen_load_fpr32(ctx
, fp1
, ft
);
11243 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11244 gen_store_fpr32(ctx
, fp2
, fd
);
11245 tcg_temp_free_i32(fp2
);
11246 tcg_temp_free_i32(fp1
);
11247 tcg_temp_free_i32(fp0
);
11250 check_cp1_64bitmode(ctx
);
11252 TCGv_i32 fp0
= tcg_temp_new_i32();
11254 gen_load_fpr32(ctx
, fp0
, fs
);
11255 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11256 gen_store_fpr32(ctx
, fp0
, fd
);
11257 tcg_temp_free_i32(fp0
);
11261 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11262 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11264 TCGv_i32 fp0
= tcg_temp_new_i32();
11265 TCGv_i32 fp1
= tcg_temp_new_i32();
11266 gen_load_fpr32(ctx
, fp0
, fs
);
11267 gen_load_fpr32(ctx
, fp1
, ft
);
11268 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11269 gen_store_fpr32(ctx
, fp1
, fd
);
11270 tcg_temp_free_i32(fp1
);
11271 tcg_temp_free_i32(fp0
);
11274 check_cp1_64bitmode(ctx
);
11276 TCGv_i32 fp0
= tcg_temp_new_i32();
11278 gen_load_fpr32(ctx
, fp0
, fs
);
11279 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11280 gen_store_fpr32(ctx
, fp0
, fd
);
11281 tcg_temp_free_i32(fp0
);
11285 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11286 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11288 TCGv_i32 fp0
= tcg_temp_new_i32();
11289 TCGv_i32 fp1
= tcg_temp_new_i32();
11290 gen_load_fpr32(ctx
, fp0
, fs
);
11291 gen_load_fpr32(ctx
, fp1
, ft
);
11292 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11293 gen_store_fpr32(ctx
, fp1
, fd
);
11294 tcg_temp_free_i32(fp1
);
11295 tcg_temp_free_i32(fp0
);
11298 check_cp1_64bitmode(ctx
);
11300 TCGv_i32 fp0
= tcg_temp_new_i32();
11301 TCGv_i32 fp1
= tcg_temp_new_i32();
11303 gen_load_fpr32(ctx
, fp0
, fs
);
11304 gen_load_fpr32(ctx
, fp1
, ft
);
11305 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11306 tcg_temp_free_i32(fp1
);
11307 gen_store_fpr32(ctx
, fp0
, fd
);
11308 tcg_temp_free_i32(fp0
);
11313 check_cp1_registers(ctx
, fd
);
11315 TCGv_i32 fp32
= tcg_temp_new_i32();
11316 TCGv_i64 fp64
= tcg_temp_new_i64();
11318 gen_load_fpr32(ctx
, fp32
, fs
);
11319 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11320 tcg_temp_free_i32(fp32
);
11321 gen_store_fpr64(ctx
, fp64
, fd
);
11322 tcg_temp_free_i64(fp64
);
11327 TCGv_i32 fp0
= tcg_temp_new_i32();
11329 gen_load_fpr32(ctx
, fp0
, fs
);
11330 if (ctx
->nan2008
) {
11331 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11333 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11335 gen_store_fpr32(ctx
, fp0
, fd
);
11336 tcg_temp_free_i32(fp0
);
11340 check_cp1_64bitmode(ctx
);
11342 TCGv_i32 fp32
= tcg_temp_new_i32();
11343 TCGv_i64 fp64
= tcg_temp_new_i64();
11345 gen_load_fpr32(ctx
, fp32
, fs
);
11346 if (ctx
->nan2008
) {
11347 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11349 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11351 tcg_temp_free_i32(fp32
);
11352 gen_store_fpr64(ctx
, fp64
, fd
);
11353 tcg_temp_free_i64(fp64
);
11359 TCGv_i64 fp64
= tcg_temp_new_i64();
11360 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11361 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11363 gen_load_fpr32(ctx
, fp32_0
, fs
);
11364 gen_load_fpr32(ctx
, fp32_1
, ft
);
11365 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11366 tcg_temp_free_i32(fp32_1
);
11367 tcg_temp_free_i32(fp32_0
);
11368 gen_store_fpr64(ctx
, fp64
, fd
);
11369 tcg_temp_free_i64(fp64
);
11375 case OPC_CMP_UEQ_S
:
11376 case OPC_CMP_OLT_S
:
11377 case OPC_CMP_ULT_S
:
11378 case OPC_CMP_OLE_S
:
11379 case OPC_CMP_ULE_S
:
11381 case OPC_CMP_NGLE_S
:
11382 case OPC_CMP_SEQ_S
:
11383 case OPC_CMP_NGL_S
:
11385 case OPC_CMP_NGE_S
:
11387 case OPC_CMP_NGT_S
:
11388 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11389 if (ctx
->opcode
& (1 << 6)) {
11390 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11392 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11396 check_cp1_registers(ctx
, fs
| ft
| fd
);
11398 TCGv_i64 fp0
= tcg_temp_new_i64();
11399 TCGv_i64 fp1
= tcg_temp_new_i64();
11401 gen_load_fpr64(ctx
, fp0
, fs
);
11402 gen_load_fpr64(ctx
, fp1
, ft
);
11403 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11404 tcg_temp_free_i64(fp1
);
11405 gen_store_fpr64(ctx
, fp0
, fd
);
11406 tcg_temp_free_i64(fp0
);
11410 check_cp1_registers(ctx
, fs
| ft
| fd
);
11412 TCGv_i64 fp0
= tcg_temp_new_i64();
11413 TCGv_i64 fp1
= tcg_temp_new_i64();
11415 gen_load_fpr64(ctx
, fp0
, fs
);
11416 gen_load_fpr64(ctx
, fp1
, ft
);
11417 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11418 tcg_temp_free_i64(fp1
);
11419 gen_store_fpr64(ctx
, fp0
, fd
);
11420 tcg_temp_free_i64(fp0
);
11424 check_cp1_registers(ctx
, fs
| ft
| fd
);
11426 TCGv_i64 fp0
= tcg_temp_new_i64();
11427 TCGv_i64 fp1
= tcg_temp_new_i64();
11429 gen_load_fpr64(ctx
, fp0
, fs
);
11430 gen_load_fpr64(ctx
, fp1
, ft
);
11431 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11432 tcg_temp_free_i64(fp1
);
11433 gen_store_fpr64(ctx
, fp0
, fd
);
11434 tcg_temp_free_i64(fp0
);
11438 check_cp1_registers(ctx
, fs
| ft
| fd
);
11440 TCGv_i64 fp0
= tcg_temp_new_i64();
11441 TCGv_i64 fp1
= tcg_temp_new_i64();
11443 gen_load_fpr64(ctx
, fp0
, fs
);
11444 gen_load_fpr64(ctx
, fp1
, ft
);
11445 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11446 tcg_temp_free_i64(fp1
);
11447 gen_store_fpr64(ctx
, fp0
, fd
);
11448 tcg_temp_free_i64(fp0
);
11452 check_cp1_registers(ctx
, fs
| fd
);
11454 TCGv_i64 fp0
= tcg_temp_new_i64();
11456 gen_load_fpr64(ctx
, fp0
, fs
);
11457 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11458 gen_store_fpr64(ctx
, fp0
, fd
);
11459 tcg_temp_free_i64(fp0
);
11463 check_cp1_registers(ctx
, fs
| fd
);
11465 TCGv_i64 fp0
= tcg_temp_new_i64();
11467 gen_load_fpr64(ctx
, fp0
, fs
);
11468 if (ctx
->abs2008
) {
11469 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11471 gen_helper_float_abs_d(fp0
, fp0
);
11473 gen_store_fpr64(ctx
, fp0
, fd
);
11474 tcg_temp_free_i64(fp0
);
11478 check_cp1_registers(ctx
, fs
| fd
);
11480 TCGv_i64 fp0
= tcg_temp_new_i64();
11482 gen_load_fpr64(ctx
, fp0
, fs
);
11483 gen_store_fpr64(ctx
, fp0
, fd
);
11484 tcg_temp_free_i64(fp0
);
11488 check_cp1_registers(ctx
, fs
| fd
);
11490 TCGv_i64 fp0
= tcg_temp_new_i64();
11492 gen_load_fpr64(ctx
, fp0
, fs
);
11493 if (ctx
->abs2008
) {
11494 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11496 gen_helper_float_chs_d(fp0
, fp0
);
11498 gen_store_fpr64(ctx
, fp0
, fd
);
11499 tcg_temp_free_i64(fp0
);
11502 case OPC_ROUND_L_D
:
11503 check_cp1_64bitmode(ctx
);
11505 TCGv_i64 fp0
= tcg_temp_new_i64();
11507 gen_load_fpr64(ctx
, fp0
, fs
);
11508 if (ctx
->nan2008
) {
11509 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11511 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11513 gen_store_fpr64(ctx
, fp0
, fd
);
11514 tcg_temp_free_i64(fp0
);
11517 case OPC_TRUNC_L_D
:
11518 check_cp1_64bitmode(ctx
);
11520 TCGv_i64 fp0
= tcg_temp_new_i64();
11522 gen_load_fpr64(ctx
, fp0
, fs
);
11523 if (ctx
->nan2008
) {
11524 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11526 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11528 gen_store_fpr64(ctx
, fp0
, fd
);
11529 tcg_temp_free_i64(fp0
);
11533 check_cp1_64bitmode(ctx
);
11535 TCGv_i64 fp0
= tcg_temp_new_i64();
11537 gen_load_fpr64(ctx
, fp0
, fs
);
11538 if (ctx
->nan2008
) {
11539 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11541 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11543 gen_store_fpr64(ctx
, fp0
, fd
);
11544 tcg_temp_free_i64(fp0
);
11547 case OPC_FLOOR_L_D
:
11548 check_cp1_64bitmode(ctx
);
11550 TCGv_i64 fp0
= tcg_temp_new_i64();
11552 gen_load_fpr64(ctx
, fp0
, fs
);
11553 if (ctx
->nan2008
) {
11554 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11556 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11558 gen_store_fpr64(ctx
, fp0
, fd
);
11559 tcg_temp_free_i64(fp0
);
11562 case OPC_ROUND_W_D
:
11563 check_cp1_registers(ctx
, fs
);
11565 TCGv_i32 fp32
= tcg_temp_new_i32();
11566 TCGv_i64 fp64
= tcg_temp_new_i64();
11568 gen_load_fpr64(ctx
, fp64
, fs
);
11569 if (ctx
->nan2008
) {
11570 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11572 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11574 tcg_temp_free_i64(fp64
);
11575 gen_store_fpr32(ctx
, fp32
, fd
);
11576 tcg_temp_free_i32(fp32
);
11579 case OPC_TRUNC_W_D
:
11580 check_cp1_registers(ctx
, fs
);
11582 TCGv_i32 fp32
= tcg_temp_new_i32();
11583 TCGv_i64 fp64
= tcg_temp_new_i64();
11585 gen_load_fpr64(ctx
, fp64
, fs
);
11586 if (ctx
->nan2008
) {
11587 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11589 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11591 tcg_temp_free_i64(fp64
);
11592 gen_store_fpr32(ctx
, fp32
, fd
);
11593 tcg_temp_free_i32(fp32
);
11597 check_cp1_registers(ctx
, fs
);
11599 TCGv_i32 fp32
= tcg_temp_new_i32();
11600 TCGv_i64 fp64
= tcg_temp_new_i64();
11602 gen_load_fpr64(ctx
, fp64
, fs
);
11603 if (ctx
->nan2008
) {
11604 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11606 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11608 tcg_temp_free_i64(fp64
);
11609 gen_store_fpr32(ctx
, fp32
, fd
);
11610 tcg_temp_free_i32(fp32
);
11613 case OPC_FLOOR_W_D
:
11614 check_cp1_registers(ctx
, fs
);
11616 TCGv_i32 fp32
= tcg_temp_new_i32();
11617 TCGv_i64 fp64
= tcg_temp_new_i64();
11619 gen_load_fpr64(ctx
, fp64
, fs
);
11620 if (ctx
->nan2008
) {
11621 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11623 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11625 tcg_temp_free_i64(fp64
);
11626 gen_store_fpr32(ctx
, fp32
, fd
);
11627 tcg_temp_free_i32(fp32
);
11631 check_insn(ctx
, ISA_MIPS32R6
);
11632 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11635 check_insn(ctx
, ISA_MIPS32R6
);
11636 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11639 check_insn(ctx
, ISA_MIPS32R6
);
11640 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11643 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11644 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11647 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11649 TCGLabel
*l1
= gen_new_label();
11653 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11655 fp0
= tcg_temp_new_i64();
11656 gen_load_fpr64(ctx
, fp0
, fs
);
11657 gen_store_fpr64(ctx
, fp0
, fd
);
11658 tcg_temp_free_i64(fp0
);
11663 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11665 TCGLabel
*l1
= gen_new_label();
11669 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11670 fp0
= tcg_temp_new_i64();
11671 gen_load_fpr64(ctx
, fp0
, fs
);
11672 gen_store_fpr64(ctx
, fp0
, fd
);
11673 tcg_temp_free_i64(fp0
);
11679 check_cp1_registers(ctx
, fs
| fd
);
11681 TCGv_i64 fp0
= tcg_temp_new_i64();
11683 gen_load_fpr64(ctx
, fp0
, fs
);
11684 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11685 gen_store_fpr64(ctx
, fp0
, fd
);
11686 tcg_temp_free_i64(fp0
);
11690 check_cp1_registers(ctx
, fs
| fd
);
11692 TCGv_i64 fp0
= tcg_temp_new_i64();
11694 gen_load_fpr64(ctx
, fp0
, fs
);
11695 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11696 gen_store_fpr64(ctx
, fp0
, fd
);
11697 tcg_temp_free_i64(fp0
);
11701 check_insn(ctx
, ISA_MIPS32R6
);
11703 TCGv_i64 fp0
= tcg_temp_new_i64();
11704 TCGv_i64 fp1
= tcg_temp_new_i64();
11705 TCGv_i64 fp2
= tcg_temp_new_i64();
11706 gen_load_fpr64(ctx
, fp0
, fs
);
11707 gen_load_fpr64(ctx
, fp1
, ft
);
11708 gen_load_fpr64(ctx
, fp2
, fd
);
11709 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11710 gen_store_fpr64(ctx
, fp2
, fd
);
11711 tcg_temp_free_i64(fp2
);
11712 tcg_temp_free_i64(fp1
);
11713 tcg_temp_free_i64(fp0
);
11717 check_insn(ctx
, ISA_MIPS32R6
);
11719 TCGv_i64 fp0
= tcg_temp_new_i64();
11720 TCGv_i64 fp1
= tcg_temp_new_i64();
11721 TCGv_i64 fp2
= tcg_temp_new_i64();
11722 gen_load_fpr64(ctx
, fp0
, fs
);
11723 gen_load_fpr64(ctx
, fp1
, ft
);
11724 gen_load_fpr64(ctx
, fp2
, fd
);
11725 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11726 gen_store_fpr64(ctx
, fp2
, fd
);
11727 tcg_temp_free_i64(fp2
);
11728 tcg_temp_free_i64(fp1
);
11729 tcg_temp_free_i64(fp0
);
11733 check_insn(ctx
, ISA_MIPS32R6
);
11735 TCGv_i64 fp0
= tcg_temp_new_i64();
11736 gen_load_fpr64(ctx
, fp0
, fs
);
11737 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11738 gen_store_fpr64(ctx
, fp0
, fd
);
11739 tcg_temp_free_i64(fp0
);
11743 check_insn(ctx
, ISA_MIPS32R6
);
11745 TCGv_i64 fp0
= tcg_temp_new_i64();
11746 gen_load_fpr64(ctx
, fp0
, fs
);
11747 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11748 gen_store_fpr64(ctx
, fp0
, fd
);
11749 tcg_temp_free_i64(fp0
);
11752 case OPC_MIN_D
: /* OPC_RECIP2_D */
11753 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11755 TCGv_i64 fp0
= tcg_temp_new_i64();
11756 TCGv_i64 fp1
= tcg_temp_new_i64();
11757 gen_load_fpr64(ctx
, fp0
, fs
);
11758 gen_load_fpr64(ctx
, fp1
, ft
);
11759 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11760 gen_store_fpr64(ctx
, fp1
, fd
);
11761 tcg_temp_free_i64(fp1
);
11762 tcg_temp_free_i64(fp0
);
11765 check_cp1_64bitmode(ctx
);
11767 TCGv_i64 fp0
= tcg_temp_new_i64();
11768 TCGv_i64 fp1
= tcg_temp_new_i64();
11770 gen_load_fpr64(ctx
, fp0
, fs
);
11771 gen_load_fpr64(ctx
, fp1
, ft
);
11772 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11773 tcg_temp_free_i64(fp1
);
11774 gen_store_fpr64(ctx
, fp0
, fd
);
11775 tcg_temp_free_i64(fp0
);
11779 case OPC_MINA_D
: /* OPC_RECIP1_D */
11780 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11782 TCGv_i64 fp0
= tcg_temp_new_i64();
11783 TCGv_i64 fp1
= tcg_temp_new_i64();
11784 gen_load_fpr64(ctx
, fp0
, fs
);
11785 gen_load_fpr64(ctx
, fp1
, ft
);
11786 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11787 gen_store_fpr64(ctx
, fp1
, fd
);
11788 tcg_temp_free_i64(fp1
);
11789 tcg_temp_free_i64(fp0
);
11792 check_cp1_64bitmode(ctx
);
11794 TCGv_i64 fp0
= tcg_temp_new_i64();
11796 gen_load_fpr64(ctx
, fp0
, fs
);
11797 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11798 gen_store_fpr64(ctx
, fp0
, fd
);
11799 tcg_temp_free_i64(fp0
);
11803 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11804 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11806 TCGv_i64 fp0
= tcg_temp_new_i64();
11807 TCGv_i64 fp1
= tcg_temp_new_i64();
11808 gen_load_fpr64(ctx
, fp0
, fs
);
11809 gen_load_fpr64(ctx
, fp1
, ft
);
11810 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11811 gen_store_fpr64(ctx
, fp1
, fd
);
11812 tcg_temp_free_i64(fp1
);
11813 tcg_temp_free_i64(fp0
);
11816 check_cp1_64bitmode(ctx
);
11818 TCGv_i64 fp0
= tcg_temp_new_i64();
11820 gen_load_fpr64(ctx
, fp0
, fs
);
11821 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11822 gen_store_fpr64(ctx
, fp0
, fd
);
11823 tcg_temp_free_i64(fp0
);
11827 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11828 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11830 TCGv_i64 fp0
= tcg_temp_new_i64();
11831 TCGv_i64 fp1
= tcg_temp_new_i64();
11832 gen_load_fpr64(ctx
, fp0
, fs
);
11833 gen_load_fpr64(ctx
, fp1
, ft
);
11834 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11835 gen_store_fpr64(ctx
, fp1
, fd
);
11836 tcg_temp_free_i64(fp1
);
11837 tcg_temp_free_i64(fp0
);
11840 check_cp1_64bitmode(ctx
);
11842 TCGv_i64 fp0
= tcg_temp_new_i64();
11843 TCGv_i64 fp1
= tcg_temp_new_i64();
11845 gen_load_fpr64(ctx
, fp0
, fs
);
11846 gen_load_fpr64(ctx
, fp1
, ft
);
11847 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11848 tcg_temp_free_i64(fp1
);
11849 gen_store_fpr64(ctx
, fp0
, fd
);
11850 tcg_temp_free_i64(fp0
);
11857 case OPC_CMP_UEQ_D
:
11858 case OPC_CMP_OLT_D
:
11859 case OPC_CMP_ULT_D
:
11860 case OPC_CMP_OLE_D
:
11861 case OPC_CMP_ULE_D
:
11863 case OPC_CMP_NGLE_D
:
11864 case OPC_CMP_SEQ_D
:
11865 case OPC_CMP_NGL_D
:
11867 case OPC_CMP_NGE_D
:
11869 case OPC_CMP_NGT_D
:
11870 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11871 if (ctx
->opcode
& (1 << 6)) {
11872 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11874 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11878 check_cp1_registers(ctx
, fs
);
11880 TCGv_i32 fp32
= tcg_temp_new_i32();
11881 TCGv_i64 fp64
= tcg_temp_new_i64();
11883 gen_load_fpr64(ctx
, fp64
, fs
);
11884 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11885 tcg_temp_free_i64(fp64
);
11886 gen_store_fpr32(ctx
, fp32
, fd
);
11887 tcg_temp_free_i32(fp32
);
11891 check_cp1_registers(ctx
, fs
);
11893 TCGv_i32 fp32
= tcg_temp_new_i32();
11894 TCGv_i64 fp64
= tcg_temp_new_i64();
11896 gen_load_fpr64(ctx
, fp64
, fs
);
11897 if (ctx
->nan2008
) {
11898 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11900 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11902 tcg_temp_free_i64(fp64
);
11903 gen_store_fpr32(ctx
, fp32
, fd
);
11904 tcg_temp_free_i32(fp32
);
11908 check_cp1_64bitmode(ctx
);
11910 TCGv_i64 fp0
= tcg_temp_new_i64();
11912 gen_load_fpr64(ctx
, fp0
, fs
);
11913 if (ctx
->nan2008
) {
11914 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
11916 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
11918 gen_store_fpr64(ctx
, fp0
, fd
);
11919 tcg_temp_free_i64(fp0
);
11924 TCGv_i32 fp0
= tcg_temp_new_i32();
11926 gen_load_fpr32(ctx
, fp0
, fs
);
11927 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
11928 gen_store_fpr32(ctx
, fp0
, fd
);
11929 tcg_temp_free_i32(fp0
);
11933 check_cp1_registers(ctx
, fd
);
11935 TCGv_i32 fp32
= tcg_temp_new_i32();
11936 TCGv_i64 fp64
= tcg_temp_new_i64();
11938 gen_load_fpr32(ctx
, fp32
, fs
);
11939 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
11940 tcg_temp_free_i32(fp32
);
11941 gen_store_fpr64(ctx
, fp64
, fd
);
11942 tcg_temp_free_i64(fp64
);
11946 check_cp1_64bitmode(ctx
);
11948 TCGv_i32 fp32
= tcg_temp_new_i32();
11949 TCGv_i64 fp64
= tcg_temp_new_i64();
11951 gen_load_fpr64(ctx
, fp64
, fs
);
11952 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
11953 tcg_temp_free_i64(fp64
);
11954 gen_store_fpr32(ctx
, fp32
, fd
);
11955 tcg_temp_free_i32(fp32
);
11959 check_cp1_64bitmode(ctx
);
11961 TCGv_i64 fp0
= tcg_temp_new_i64();
11963 gen_load_fpr64(ctx
, fp0
, fs
);
11964 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11965 gen_store_fpr64(ctx
, fp0
, fd
);
11966 tcg_temp_free_i64(fp0
);
11969 case OPC_CVT_PS_PW
:
11972 TCGv_i64 fp0
= tcg_temp_new_i64();
11974 gen_load_fpr64(ctx
, fp0
, fs
);
11975 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11976 gen_store_fpr64(ctx
, fp0
, fd
);
11977 tcg_temp_free_i64(fp0
);
11983 TCGv_i64 fp0
= tcg_temp_new_i64();
11984 TCGv_i64 fp1
= tcg_temp_new_i64();
11986 gen_load_fpr64(ctx
, fp0
, fs
);
11987 gen_load_fpr64(ctx
, fp1
, ft
);
11988 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11989 tcg_temp_free_i64(fp1
);
11990 gen_store_fpr64(ctx
, fp0
, fd
);
11991 tcg_temp_free_i64(fp0
);
11997 TCGv_i64 fp0
= tcg_temp_new_i64();
11998 TCGv_i64 fp1
= tcg_temp_new_i64();
12000 gen_load_fpr64(ctx
, fp0
, fs
);
12001 gen_load_fpr64(ctx
, fp1
, ft
);
12002 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
12003 tcg_temp_free_i64(fp1
);
12004 gen_store_fpr64(ctx
, fp0
, fd
);
12005 tcg_temp_free_i64(fp0
);
12011 TCGv_i64 fp0
= tcg_temp_new_i64();
12012 TCGv_i64 fp1
= tcg_temp_new_i64();
12014 gen_load_fpr64(ctx
, fp0
, fs
);
12015 gen_load_fpr64(ctx
, fp1
, ft
);
12016 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
12017 tcg_temp_free_i64(fp1
);
12018 gen_store_fpr64(ctx
, fp0
, fd
);
12019 tcg_temp_free_i64(fp0
);
12025 TCGv_i64 fp0
= tcg_temp_new_i64();
12027 gen_load_fpr64(ctx
, fp0
, fs
);
12028 gen_helper_float_abs_ps(fp0
, fp0
);
12029 gen_store_fpr64(ctx
, fp0
, fd
);
12030 tcg_temp_free_i64(fp0
);
12036 TCGv_i64 fp0
= tcg_temp_new_i64();
12038 gen_load_fpr64(ctx
, fp0
, fs
);
12039 gen_store_fpr64(ctx
, fp0
, fd
);
12040 tcg_temp_free_i64(fp0
);
12046 TCGv_i64 fp0
= tcg_temp_new_i64();
12048 gen_load_fpr64(ctx
, fp0
, fs
);
12049 gen_helper_float_chs_ps(fp0
, fp0
);
12050 gen_store_fpr64(ctx
, fp0
, fd
);
12051 tcg_temp_free_i64(fp0
);
12056 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12061 TCGLabel
*l1
= gen_new_label();
12065 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12066 fp0
= tcg_temp_new_i64();
12067 gen_load_fpr64(ctx
, fp0
, fs
);
12068 gen_store_fpr64(ctx
, fp0
, fd
);
12069 tcg_temp_free_i64(fp0
);
12076 TCGLabel
*l1
= gen_new_label();
12080 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12081 fp0
= tcg_temp_new_i64();
12082 gen_load_fpr64(ctx
, fp0
, fs
);
12083 gen_store_fpr64(ctx
, fp0
, fd
);
12084 tcg_temp_free_i64(fp0
);
12092 TCGv_i64 fp0
= tcg_temp_new_i64();
12093 TCGv_i64 fp1
= tcg_temp_new_i64();
12095 gen_load_fpr64(ctx
, fp0
, ft
);
12096 gen_load_fpr64(ctx
, fp1
, fs
);
12097 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12098 tcg_temp_free_i64(fp1
);
12099 gen_store_fpr64(ctx
, fp0
, fd
);
12100 tcg_temp_free_i64(fp0
);
12106 TCGv_i64 fp0
= tcg_temp_new_i64();
12107 TCGv_i64 fp1
= tcg_temp_new_i64();
12109 gen_load_fpr64(ctx
, fp0
, ft
);
12110 gen_load_fpr64(ctx
, fp1
, fs
);
12111 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12112 tcg_temp_free_i64(fp1
);
12113 gen_store_fpr64(ctx
, fp0
, fd
);
12114 tcg_temp_free_i64(fp0
);
12117 case OPC_RECIP2_PS
:
12120 TCGv_i64 fp0
= tcg_temp_new_i64();
12121 TCGv_i64 fp1
= tcg_temp_new_i64();
12123 gen_load_fpr64(ctx
, fp0
, fs
);
12124 gen_load_fpr64(ctx
, fp1
, ft
);
12125 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12126 tcg_temp_free_i64(fp1
);
12127 gen_store_fpr64(ctx
, fp0
, fd
);
12128 tcg_temp_free_i64(fp0
);
12131 case OPC_RECIP1_PS
:
12134 TCGv_i64 fp0
= tcg_temp_new_i64();
12136 gen_load_fpr64(ctx
, fp0
, fs
);
12137 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12138 gen_store_fpr64(ctx
, fp0
, fd
);
12139 tcg_temp_free_i64(fp0
);
12142 case OPC_RSQRT1_PS
:
12145 TCGv_i64 fp0
= tcg_temp_new_i64();
12147 gen_load_fpr64(ctx
, fp0
, fs
);
12148 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12149 gen_store_fpr64(ctx
, fp0
, fd
);
12150 tcg_temp_free_i64(fp0
);
12153 case OPC_RSQRT2_PS
:
12156 TCGv_i64 fp0
= tcg_temp_new_i64();
12157 TCGv_i64 fp1
= tcg_temp_new_i64();
12159 gen_load_fpr64(ctx
, fp0
, fs
);
12160 gen_load_fpr64(ctx
, fp1
, ft
);
12161 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12162 tcg_temp_free_i64(fp1
);
12163 gen_store_fpr64(ctx
, fp0
, fd
);
12164 tcg_temp_free_i64(fp0
);
12168 check_cp1_64bitmode(ctx
);
12170 TCGv_i32 fp0
= tcg_temp_new_i32();
12172 gen_load_fpr32h(ctx
, fp0
, fs
);
12173 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12174 gen_store_fpr32(ctx
, fp0
, fd
);
12175 tcg_temp_free_i32(fp0
);
12178 case OPC_CVT_PW_PS
:
12181 TCGv_i64 fp0
= tcg_temp_new_i64();
12183 gen_load_fpr64(ctx
, fp0
, fs
);
12184 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12185 gen_store_fpr64(ctx
, fp0
, fd
);
12186 tcg_temp_free_i64(fp0
);
12190 check_cp1_64bitmode(ctx
);
12192 TCGv_i32 fp0
= tcg_temp_new_i32();
12194 gen_load_fpr32(ctx
, fp0
, fs
);
12195 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12196 gen_store_fpr32(ctx
, fp0
, fd
);
12197 tcg_temp_free_i32(fp0
);
12203 TCGv_i32 fp0
= tcg_temp_new_i32();
12204 TCGv_i32 fp1
= tcg_temp_new_i32();
12206 gen_load_fpr32(ctx
, fp0
, fs
);
12207 gen_load_fpr32(ctx
, fp1
, ft
);
12208 gen_store_fpr32h(ctx
, fp0
, fd
);
12209 gen_store_fpr32(ctx
, fp1
, fd
);
12210 tcg_temp_free_i32(fp0
);
12211 tcg_temp_free_i32(fp1
);
12217 TCGv_i32 fp0
= tcg_temp_new_i32();
12218 TCGv_i32 fp1
= tcg_temp_new_i32();
12220 gen_load_fpr32(ctx
, fp0
, fs
);
12221 gen_load_fpr32h(ctx
, fp1
, ft
);
12222 gen_store_fpr32(ctx
, fp1
, fd
);
12223 gen_store_fpr32h(ctx
, fp0
, fd
);
12224 tcg_temp_free_i32(fp0
);
12225 tcg_temp_free_i32(fp1
);
12231 TCGv_i32 fp0
= tcg_temp_new_i32();
12232 TCGv_i32 fp1
= tcg_temp_new_i32();
12234 gen_load_fpr32h(ctx
, fp0
, fs
);
12235 gen_load_fpr32(ctx
, fp1
, ft
);
12236 gen_store_fpr32(ctx
, fp1
, fd
);
12237 gen_store_fpr32h(ctx
, fp0
, fd
);
12238 tcg_temp_free_i32(fp0
);
12239 tcg_temp_free_i32(fp1
);
12245 TCGv_i32 fp0
= tcg_temp_new_i32();
12246 TCGv_i32 fp1
= tcg_temp_new_i32();
12248 gen_load_fpr32h(ctx
, fp0
, fs
);
12249 gen_load_fpr32h(ctx
, fp1
, ft
);
12250 gen_store_fpr32(ctx
, fp1
, fd
);
12251 gen_store_fpr32h(ctx
, fp0
, fd
);
12252 tcg_temp_free_i32(fp0
);
12253 tcg_temp_free_i32(fp1
);
12257 case OPC_CMP_UN_PS
:
12258 case OPC_CMP_EQ_PS
:
12259 case OPC_CMP_UEQ_PS
:
12260 case OPC_CMP_OLT_PS
:
12261 case OPC_CMP_ULT_PS
:
12262 case OPC_CMP_OLE_PS
:
12263 case OPC_CMP_ULE_PS
:
12264 case OPC_CMP_SF_PS
:
12265 case OPC_CMP_NGLE_PS
:
12266 case OPC_CMP_SEQ_PS
:
12267 case OPC_CMP_NGL_PS
:
12268 case OPC_CMP_LT_PS
:
12269 case OPC_CMP_NGE_PS
:
12270 case OPC_CMP_LE_PS
:
12271 case OPC_CMP_NGT_PS
:
12272 if (ctx
->opcode
& (1 << 6)) {
12273 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12275 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12279 MIPS_INVAL("farith");
12280 generate_exception_end(ctx
, EXCP_RI
);
12285 /* Coprocessor 3 (FPU) */
12286 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
12287 int fd
, int fs
, int base
, int index
)
12289 TCGv t0
= tcg_temp_new();
12292 gen_load_gpr(t0
, index
);
12293 } else if (index
== 0) {
12294 gen_load_gpr(t0
, base
);
12296 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12298 /* Don't do NOP if destination is zero: we must perform the actual
12304 TCGv_i32 fp0
= tcg_temp_new_i32();
12306 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12307 tcg_gen_trunc_tl_i32(fp0
, t0
);
12308 gen_store_fpr32(ctx
, fp0
, fd
);
12309 tcg_temp_free_i32(fp0
);
12314 check_cp1_registers(ctx
, fd
);
12316 TCGv_i64 fp0
= tcg_temp_new_i64();
12317 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12318 gen_store_fpr64(ctx
, fp0
, fd
);
12319 tcg_temp_free_i64(fp0
);
12323 check_cp1_64bitmode(ctx
);
12324 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12326 TCGv_i64 fp0
= tcg_temp_new_i64();
12328 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12329 gen_store_fpr64(ctx
, fp0
, fd
);
12330 tcg_temp_free_i64(fp0
);
12336 TCGv_i32 fp0
= tcg_temp_new_i32();
12337 gen_load_fpr32(ctx
, fp0
, fs
);
12338 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12339 tcg_temp_free_i32(fp0
);
12344 check_cp1_registers(ctx
, fs
);
12346 TCGv_i64 fp0
= tcg_temp_new_i64();
12347 gen_load_fpr64(ctx
, fp0
, fs
);
12348 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12349 tcg_temp_free_i64(fp0
);
12353 check_cp1_64bitmode(ctx
);
12354 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12356 TCGv_i64 fp0
= tcg_temp_new_i64();
12357 gen_load_fpr64(ctx
, fp0
, fs
);
12358 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12359 tcg_temp_free_i64(fp0
);
12366 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
12367 int fd
, int fr
, int fs
, int ft
)
12373 TCGv t0
= tcg_temp_local_new();
12374 TCGv_i32 fp
= tcg_temp_new_i32();
12375 TCGv_i32 fph
= tcg_temp_new_i32();
12376 TCGLabel
*l1
= gen_new_label();
12377 TCGLabel
*l2
= gen_new_label();
12379 gen_load_gpr(t0
, fr
);
12380 tcg_gen_andi_tl(t0
, t0
, 0x7);
12382 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12383 gen_load_fpr32(ctx
, fp
, fs
);
12384 gen_load_fpr32h(ctx
, fph
, fs
);
12385 gen_store_fpr32(ctx
, fp
, fd
);
12386 gen_store_fpr32h(ctx
, fph
, fd
);
12389 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12391 #ifdef TARGET_WORDS_BIGENDIAN
12392 gen_load_fpr32(ctx
, fp
, fs
);
12393 gen_load_fpr32h(ctx
, fph
, ft
);
12394 gen_store_fpr32h(ctx
, fp
, fd
);
12395 gen_store_fpr32(ctx
, fph
, fd
);
12397 gen_load_fpr32h(ctx
, fph
, fs
);
12398 gen_load_fpr32(ctx
, fp
, ft
);
12399 gen_store_fpr32(ctx
, fph
, fd
);
12400 gen_store_fpr32h(ctx
, fp
, fd
);
12403 tcg_temp_free_i32(fp
);
12404 tcg_temp_free_i32(fph
);
12410 TCGv_i32 fp0
= tcg_temp_new_i32();
12411 TCGv_i32 fp1
= tcg_temp_new_i32();
12412 TCGv_i32 fp2
= tcg_temp_new_i32();
12414 gen_load_fpr32(ctx
, fp0
, fs
);
12415 gen_load_fpr32(ctx
, fp1
, ft
);
12416 gen_load_fpr32(ctx
, fp2
, fr
);
12417 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12418 tcg_temp_free_i32(fp0
);
12419 tcg_temp_free_i32(fp1
);
12420 gen_store_fpr32(ctx
, fp2
, fd
);
12421 tcg_temp_free_i32(fp2
);
12426 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12428 TCGv_i64 fp0
= tcg_temp_new_i64();
12429 TCGv_i64 fp1
= tcg_temp_new_i64();
12430 TCGv_i64 fp2
= tcg_temp_new_i64();
12432 gen_load_fpr64(ctx
, fp0
, fs
);
12433 gen_load_fpr64(ctx
, fp1
, ft
);
12434 gen_load_fpr64(ctx
, fp2
, fr
);
12435 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12436 tcg_temp_free_i64(fp0
);
12437 tcg_temp_free_i64(fp1
);
12438 gen_store_fpr64(ctx
, fp2
, fd
);
12439 tcg_temp_free_i64(fp2
);
12445 TCGv_i64 fp0
= tcg_temp_new_i64();
12446 TCGv_i64 fp1
= tcg_temp_new_i64();
12447 TCGv_i64 fp2
= tcg_temp_new_i64();
12449 gen_load_fpr64(ctx
, fp0
, fs
);
12450 gen_load_fpr64(ctx
, fp1
, ft
);
12451 gen_load_fpr64(ctx
, fp2
, fr
);
12452 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12453 tcg_temp_free_i64(fp0
);
12454 tcg_temp_free_i64(fp1
);
12455 gen_store_fpr64(ctx
, fp2
, fd
);
12456 tcg_temp_free_i64(fp2
);
12462 TCGv_i32 fp0
= tcg_temp_new_i32();
12463 TCGv_i32 fp1
= tcg_temp_new_i32();
12464 TCGv_i32 fp2
= tcg_temp_new_i32();
12466 gen_load_fpr32(ctx
, fp0
, fs
);
12467 gen_load_fpr32(ctx
, fp1
, ft
);
12468 gen_load_fpr32(ctx
, fp2
, fr
);
12469 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12470 tcg_temp_free_i32(fp0
);
12471 tcg_temp_free_i32(fp1
);
12472 gen_store_fpr32(ctx
, fp2
, fd
);
12473 tcg_temp_free_i32(fp2
);
12478 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12480 TCGv_i64 fp0
= tcg_temp_new_i64();
12481 TCGv_i64 fp1
= tcg_temp_new_i64();
12482 TCGv_i64 fp2
= tcg_temp_new_i64();
12484 gen_load_fpr64(ctx
, fp0
, fs
);
12485 gen_load_fpr64(ctx
, fp1
, ft
);
12486 gen_load_fpr64(ctx
, fp2
, fr
);
12487 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12488 tcg_temp_free_i64(fp0
);
12489 tcg_temp_free_i64(fp1
);
12490 gen_store_fpr64(ctx
, fp2
, fd
);
12491 tcg_temp_free_i64(fp2
);
12497 TCGv_i64 fp0
= tcg_temp_new_i64();
12498 TCGv_i64 fp1
= tcg_temp_new_i64();
12499 TCGv_i64 fp2
= tcg_temp_new_i64();
12501 gen_load_fpr64(ctx
, fp0
, fs
);
12502 gen_load_fpr64(ctx
, fp1
, ft
);
12503 gen_load_fpr64(ctx
, fp2
, fr
);
12504 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12505 tcg_temp_free_i64(fp0
);
12506 tcg_temp_free_i64(fp1
);
12507 gen_store_fpr64(ctx
, fp2
, fd
);
12508 tcg_temp_free_i64(fp2
);
12514 TCGv_i32 fp0
= tcg_temp_new_i32();
12515 TCGv_i32 fp1
= tcg_temp_new_i32();
12516 TCGv_i32 fp2
= tcg_temp_new_i32();
12518 gen_load_fpr32(ctx
, fp0
, fs
);
12519 gen_load_fpr32(ctx
, fp1
, ft
);
12520 gen_load_fpr32(ctx
, fp2
, fr
);
12521 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12522 tcg_temp_free_i32(fp0
);
12523 tcg_temp_free_i32(fp1
);
12524 gen_store_fpr32(ctx
, fp2
, fd
);
12525 tcg_temp_free_i32(fp2
);
12530 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12532 TCGv_i64 fp0
= tcg_temp_new_i64();
12533 TCGv_i64 fp1
= tcg_temp_new_i64();
12534 TCGv_i64 fp2
= tcg_temp_new_i64();
12536 gen_load_fpr64(ctx
, fp0
, fs
);
12537 gen_load_fpr64(ctx
, fp1
, ft
);
12538 gen_load_fpr64(ctx
, fp2
, fr
);
12539 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12540 tcg_temp_free_i64(fp0
);
12541 tcg_temp_free_i64(fp1
);
12542 gen_store_fpr64(ctx
, fp2
, fd
);
12543 tcg_temp_free_i64(fp2
);
12549 TCGv_i64 fp0
= tcg_temp_new_i64();
12550 TCGv_i64 fp1
= tcg_temp_new_i64();
12551 TCGv_i64 fp2
= tcg_temp_new_i64();
12553 gen_load_fpr64(ctx
, fp0
, fs
);
12554 gen_load_fpr64(ctx
, fp1
, ft
);
12555 gen_load_fpr64(ctx
, fp2
, fr
);
12556 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12557 tcg_temp_free_i64(fp0
);
12558 tcg_temp_free_i64(fp1
);
12559 gen_store_fpr64(ctx
, fp2
, fd
);
12560 tcg_temp_free_i64(fp2
);
12566 TCGv_i32 fp0
= tcg_temp_new_i32();
12567 TCGv_i32 fp1
= tcg_temp_new_i32();
12568 TCGv_i32 fp2
= tcg_temp_new_i32();
12570 gen_load_fpr32(ctx
, fp0
, fs
);
12571 gen_load_fpr32(ctx
, fp1
, ft
);
12572 gen_load_fpr32(ctx
, fp2
, fr
);
12573 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12574 tcg_temp_free_i32(fp0
);
12575 tcg_temp_free_i32(fp1
);
12576 gen_store_fpr32(ctx
, fp2
, fd
);
12577 tcg_temp_free_i32(fp2
);
12582 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12584 TCGv_i64 fp0
= tcg_temp_new_i64();
12585 TCGv_i64 fp1
= tcg_temp_new_i64();
12586 TCGv_i64 fp2
= tcg_temp_new_i64();
12588 gen_load_fpr64(ctx
, fp0
, fs
);
12589 gen_load_fpr64(ctx
, fp1
, ft
);
12590 gen_load_fpr64(ctx
, fp2
, fr
);
12591 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12592 tcg_temp_free_i64(fp0
);
12593 tcg_temp_free_i64(fp1
);
12594 gen_store_fpr64(ctx
, fp2
, fd
);
12595 tcg_temp_free_i64(fp2
);
12601 TCGv_i64 fp0
= tcg_temp_new_i64();
12602 TCGv_i64 fp1
= tcg_temp_new_i64();
12603 TCGv_i64 fp2
= tcg_temp_new_i64();
12605 gen_load_fpr64(ctx
, fp0
, fs
);
12606 gen_load_fpr64(ctx
, fp1
, ft
);
12607 gen_load_fpr64(ctx
, fp2
, fr
);
12608 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12609 tcg_temp_free_i64(fp0
);
12610 tcg_temp_free_i64(fp1
);
12611 gen_store_fpr64(ctx
, fp2
, fd
);
12612 tcg_temp_free_i64(fp2
);
12616 MIPS_INVAL("flt3_arith");
12617 generate_exception_end(ctx
, EXCP_RI
);
12622 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12626 #if !defined(CONFIG_USER_ONLY)
12627 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12628 Therefore only check the ISA in system mode. */
12629 check_insn(ctx
, ISA_MIPS32R2
);
12631 t0
= tcg_temp_new();
12635 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12636 gen_store_gpr(t0
, rt
);
12639 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12640 gen_store_gpr(t0
, rt
);
12643 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12646 gen_helper_rdhwr_cc(t0
, cpu_env
);
12647 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12650 gen_store_gpr(t0
, rt
);
12651 /* Break the TB to be able to take timer interrupts immediately
12652 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12653 we break completely out of translated code. */
12654 gen_save_pc(ctx
->base
.pc_next
+ 4);
12655 ctx
->base
.is_jmp
= DISAS_EXIT
;
12658 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12659 gen_store_gpr(t0
, rt
);
12662 check_insn(ctx
, ISA_MIPS32R6
);
12664 /* Performance counter registers are not implemented other than
12665 * control register 0.
12667 generate_exception(ctx
, EXCP_RI
);
12669 gen_helper_rdhwr_performance(t0
, cpu_env
);
12670 gen_store_gpr(t0
, rt
);
12673 check_insn(ctx
, ISA_MIPS32R6
);
12674 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12675 gen_store_gpr(t0
, rt
);
12678 #if defined(CONFIG_USER_ONLY)
12679 tcg_gen_ld_tl(t0
, cpu_env
,
12680 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12681 gen_store_gpr(t0
, rt
);
12684 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12685 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12686 tcg_gen_ld_tl(t0
, cpu_env
,
12687 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12688 gen_store_gpr(t0
, rt
);
12690 generate_exception_end(ctx
, EXCP_RI
);
12694 default: /* Invalid */
12695 MIPS_INVAL("rdhwr");
12696 generate_exception_end(ctx
, EXCP_RI
);
12702 static inline void clear_branch_hflags(DisasContext
*ctx
)
12704 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12705 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12706 save_cpu_state(ctx
, 0);
12708 /* it is not safe to save ctx->hflags as hflags may be changed
12709 in execution time by the instruction in delay / forbidden slot. */
12710 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12714 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12716 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12717 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12718 /* Branches completion */
12719 clear_branch_hflags(ctx
);
12720 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12721 /* FIXME: Need to clear can_do_io. */
12722 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12723 case MIPS_HFLAG_FBNSLOT
:
12724 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12727 /* unconditional branch */
12728 if (proc_hflags
& MIPS_HFLAG_BX
) {
12729 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12731 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12733 case MIPS_HFLAG_BL
:
12734 /* blikely taken case */
12735 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12737 case MIPS_HFLAG_BC
:
12738 /* Conditional branch */
12740 TCGLabel
*l1
= gen_new_label();
12742 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12743 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12745 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12748 case MIPS_HFLAG_BR
:
12749 /* unconditional branch to register */
12750 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12751 TCGv t0
= tcg_temp_new();
12752 TCGv_i32 t1
= tcg_temp_new_i32();
12754 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12755 tcg_gen_trunc_tl_i32(t1
, t0
);
12757 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12758 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12759 tcg_gen_or_i32(hflags
, hflags
, t1
);
12760 tcg_temp_free_i32(t1
);
12762 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12764 tcg_gen_mov_tl(cpu_PC
, btarget
);
12766 if (ctx
->base
.singlestep_enabled
) {
12767 save_cpu_state(ctx
, 0);
12768 gen_helper_raise_exception_debug(cpu_env
);
12770 tcg_gen_lookup_and_goto_ptr();
12773 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12779 /* Compact Branches */
12780 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12781 int rs
, int rt
, int32_t offset
)
12783 int bcond_compute
= 0;
12784 TCGv t0
= tcg_temp_new();
12785 TCGv t1
= tcg_temp_new();
12786 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12788 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12789 #ifdef MIPS_DEBUG_DISAS
12790 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12791 "\n", ctx
->base
.pc_next
);
12793 generate_exception_end(ctx
, EXCP_RI
);
12797 /* Load needed operands and calculate btarget */
12799 /* compact branch */
12800 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12801 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12802 gen_load_gpr(t0
, rs
);
12803 gen_load_gpr(t1
, rt
);
12805 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12806 if (rs
<= rt
&& rs
== 0) {
12807 /* OPC_BEQZALC, OPC_BNEZALC */
12808 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12811 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12812 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12813 gen_load_gpr(t0
, rs
);
12814 gen_load_gpr(t1
, rt
);
12816 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12818 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12819 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12820 if (rs
== 0 || rs
== rt
) {
12821 /* OPC_BLEZALC, OPC_BGEZALC */
12822 /* OPC_BGTZALC, OPC_BLTZALC */
12823 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12825 gen_load_gpr(t0
, rs
);
12826 gen_load_gpr(t1
, rt
);
12828 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12832 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12837 /* OPC_BEQZC, OPC_BNEZC */
12838 gen_load_gpr(t0
, rs
);
12840 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12842 /* OPC_JIC, OPC_JIALC */
12843 TCGv tbase
= tcg_temp_new();
12844 TCGv toffset
= tcg_temp_new();
12846 gen_load_gpr(tbase
, rt
);
12847 tcg_gen_movi_tl(toffset
, offset
);
12848 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12849 tcg_temp_free(tbase
);
12850 tcg_temp_free(toffset
);
12854 MIPS_INVAL("Compact branch/jump");
12855 generate_exception_end(ctx
, EXCP_RI
);
12859 if (bcond_compute
== 0) {
12860 /* Uncoditional compact branch */
12863 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12866 ctx
->hflags
|= MIPS_HFLAG_BR
;
12869 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12872 ctx
->hflags
|= MIPS_HFLAG_B
;
12875 MIPS_INVAL("Compact branch/jump");
12876 generate_exception_end(ctx
, EXCP_RI
);
12880 /* Generating branch here as compact branches don't have delay slot */
12881 gen_branch(ctx
, 4);
12883 /* Conditional compact branch */
12884 TCGLabel
*fs
= gen_new_label();
12885 save_cpu_state(ctx
, 0);
12888 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12889 if (rs
== 0 && rt
!= 0) {
12891 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12892 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12894 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12897 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12900 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12901 if (rs
== 0 && rt
!= 0) {
12903 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12904 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12906 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12909 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
12912 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12913 if (rs
== 0 && rt
!= 0) {
12915 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12916 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12918 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12921 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
12924 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12925 if (rs
== 0 && rt
!= 0) {
12927 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12928 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12930 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
12933 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
12936 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12937 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12939 /* OPC_BOVC, OPC_BNVC */
12940 TCGv t2
= tcg_temp_new();
12941 TCGv t3
= tcg_temp_new();
12942 TCGv t4
= tcg_temp_new();
12943 TCGv input_overflow
= tcg_temp_new();
12945 gen_load_gpr(t0
, rs
);
12946 gen_load_gpr(t1
, rt
);
12947 tcg_gen_ext32s_tl(t2
, t0
);
12948 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
12949 tcg_gen_ext32s_tl(t3
, t1
);
12950 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
12951 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
12953 tcg_gen_add_tl(t4
, t2
, t3
);
12954 tcg_gen_ext32s_tl(t4
, t4
);
12955 tcg_gen_xor_tl(t2
, t2
, t3
);
12956 tcg_gen_xor_tl(t3
, t4
, t3
);
12957 tcg_gen_andc_tl(t2
, t3
, t2
);
12958 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12959 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12960 if (opc
== OPC_BOVC
) {
12962 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12965 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12967 tcg_temp_free(input_overflow
);
12971 } else if (rs
< rt
&& rs
== 0) {
12972 /* OPC_BEQZALC, OPC_BNEZALC */
12973 if (opc
== OPC_BEQZALC
) {
12975 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12978 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12981 /* OPC_BEQC, OPC_BNEC */
12982 if (opc
== OPC_BEQC
) {
12984 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12987 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12992 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12995 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12998 MIPS_INVAL("Compact conditional branch/jump");
12999 generate_exception_end(ctx
, EXCP_RI
);
13003 /* Generating branch here as compact branches don't have delay slot */
13004 gen_goto_tb(ctx
, 1, ctx
->btarget
);
13007 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
/* ISA extensions (ASEs) */

/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes (bits 15..11 of the 16-bit instruction) */
enum {
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_B = 0x02,
    M16_OPC_JAL = 0x03,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_LD = 0x07,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_I8 = 0x0c,
    M16_OPC_LI = 0x0d,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_SD = 0x0f,
    M16_OPC_LB = 0x10,
    M16_OPC_LH = 0x11,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LW = 0x13,
    M16_OPC_LBU = 0x14,
    M16_OPC_LHU = 0x15,
    M16_OPC_LWPC = 0x16,
    M16_OPC_LWU = 0x17,
    M16_OPC_SB = 0x18,
    M16_OPC_SH = 0x19,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_SW = 0x1b,
    M16_OPC_RRR = 0x1c,
    M16_OPC_RR = 0x1d,
    M16_OPC_EXTEND = 0x1e,
    M16_OPC_I64 = 0x1f
};
13054 /* I8 funct field */
13073 /* RR funct field */
13107 /* I64 funct field */
/* NOTE(review): the extraction dropped nearly all members of the I8, RR
   and I64 funct enums; only the single I64 entry below survived.  Restore
   the full enum bodies from the upstream source before building. */
13115 I64_DADDIUPC
= 0x6,
/* RR ry field for CNVT: zero/sign extend byte, halfword, word */
enum {
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
};
/* Translate a 3-bit MIPS16 register field to the architectural GPR
   number (s0, s1, v0, v1, a0, a1, a2, a3). */
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
13136 static void gen_mips16_save (DisasContext
*ctx
,
13137 int xsregs
, int aregs
,
13138 int do_ra
, int do_s0
, int do_s1
,
13141 TCGv t0
= tcg_temp_new();
13142 TCGv t1
= tcg_temp_new();
13143 TCGv t2
= tcg_temp_new();
13173 generate_exception_end(ctx
, EXCP_RI
);
13179 gen_base_offset_addr(ctx
, t0
, 29, 12);
13180 gen_load_gpr(t1
, 7);
13181 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13184 gen_base_offset_addr(ctx
, t0
, 29, 8);
13185 gen_load_gpr(t1
, 6);
13186 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13189 gen_base_offset_addr(ctx
, t0
, 29, 4);
13190 gen_load_gpr(t1
, 5);
13191 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13194 gen_base_offset_addr(ctx
, t0
, 29, 0);
13195 gen_load_gpr(t1
, 4);
13196 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13199 gen_load_gpr(t0
, 29);
13201 #define DECR_AND_STORE(reg) do { \
13202 tcg_gen_movi_tl(t2, -4); \
13203 gen_op_addr_add(ctx, t0, t0, t2); \
13204 gen_load_gpr(t1, reg); \
13205 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13209 DECR_AND_STORE(31);
13214 DECR_AND_STORE(30);
13217 DECR_AND_STORE(23);
13220 DECR_AND_STORE(22);
13223 DECR_AND_STORE(21);
13226 DECR_AND_STORE(20);
13229 DECR_AND_STORE(19);
13232 DECR_AND_STORE(18);
13236 DECR_AND_STORE(17);
13239 DECR_AND_STORE(16);
13269 generate_exception_end(ctx
, EXCP_RI
);
13285 #undef DECR_AND_STORE
13287 tcg_gen_movi_tl(t2
, -framesize
);
13288 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13294 static void gen_mips16_restore (DisasContext
*ctx
,
13295 int xsregs
, int aregs
,
13296 int do_ra
, int do_s0
, int do_s1
,
13300 TCGv t0
= tcg_temp_new();
13301 TCGv t1
= tcg_temp_new();
13302 TCGv t2
= tcg_temp_new();
13304 tcg_gen_movi_tl(t2
, framesize
);
13305 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13307 #define DECR_AND_LOAD(reg) do { \
13308 tcg_gen_movi_tl(t2, -4); \
13309 gen_op_addr_add(ctx, t0, t0, t2); \
13310 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13311 gen_store_gpr(t1, reg); \
13375 generate_exception_end(ctx
, EXCP_RI
);
13391 #undef DECR_AND_LOAD
13393 tcg_gen_movi_tl(t2
, framesize
);
13394 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13400 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
13401 int is_64_bit
, int extended
)
13405 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13406 generate_exception_end(ctx
, EXCP_RI
);
13410 t0
= tcg_temp_new();
13412 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13413 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13415 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13421 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13424 TCGv_i32 t0
= tcg_const_i32(op
);
13425 TCGv t1
= tcg_temp_new();
13426 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13427 gen_helper_cache(cpu_env
, t1
, t0
);
#if defined(TARGET_MIPS64)
/*
 * Decode the MIPS16 I64 opcode group (64-bit loads/stores and DADDIU
 * forms).  For non-extended encodings the immediate is scaled here;
 * extended encodings pass offset through unchanged.
 */
static void decode_i64_mips16 (DisasContext *ctx,
                               int ry, int funct, int16_t offset,
                               int extended)
{
    switch (funct) {
    case I64_LDSP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_ld(ctx, OPC_LD, ry, 29, offset);
        break;
    case I64_SDSP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 3;
        gen_st(ctx, OPC_SD, ry, 29, offset);
        break;
    case I64_SDRASP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : (ctx->opcode & 0xff) << 3;
        gen_st(ctx, OPC_SD, 31, 29, offset);
        break;
    case I64_DADJSP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)ctx->opcode) << 3;
        gen_arith_imm(ctx, OPC_DADDIU, 29, 29, offset);
        break;
    case I64_LDPC:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
            generate_exception_end(ctx, EXCP_RI);
        } else {
            offset = extended ? offset : offset << 3;
            gen_ld(ctx, OPC_LDPC, ry, 0, offset);
        }
        break;
    case I64_DADDIU5:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : ((int8_t)(offset << 3)) >> 3;
        gen_arith_imm(ctx, OPC_DADDIU, ry, ry, offset);
        break;
    case I64_DADDIUPC:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_addiupc(ctx, ry, offset, 1, extended);
        break;
    case I64_DADDIUSP:
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        offset = extended ? offset : offset << 2;
        gen_arith_imm(ctx, OPC_DADDIU, ry, 29, offset);
        break;
    }
}
#endif
13492 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13494 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13495 int op
, rx
, ry
, funct
, sa
;
13496 int16_t imm
, offset
;
13498 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13499 op
= (ctx
->opcode
>> 11) & 0x1f;
13500 sa
= (ctx
->opcode
>> 22) & 0x1f;
13501 funct
= (ctx
->opcode
>> 8) & 0x7;
13502 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13503 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13504 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13505 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13506 | (ctx
->opcode
& 0x1f));
13508 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13511 case M16_OPC_ADDIUSP
:
13512 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13514 case M16_OPC_ADDIUPC
:
13515 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13518 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13519 /* No delay slot, so just process as a normal instruction */
13522 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13523 /* No delay slot, so just process as a normal instruction */
13525 case M16_OPC_BNEQZ
:
13526 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13527 /* No delay slot, so just process as a normal instruction */
13529 case M16_OPC_SHIFT
:
13530 switch (ctx
->opcode
& 0x3) {
13532 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13535 #if defined(TARGET_MIPS64)
13536 check_mips_64(ctx
);
13537 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13539 generate_exception_end(ctx
, EXCP_RI
);
13543 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13546 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13550 #if defined(TARGET_MIPS64)
13552 check_insn(ctx
, ISA_MIPS3
);
13553 check_mips_64(ctx
);
13554 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13558 imm
= ctx
->opcode
& 0xf;
13559 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13560 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13561 imm
= (int16_t) (imm
<< 1) >> 1;
13562 if ((ctx
->opcode
>> 4) & 0x1) {
13563 #if defined(TARGET_MIPS64)
13564 check_mips_64(ctx
);
13565 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13567 generate_exception_end(ctx
, EXCP_RI
);
13570 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13573 case M16_OPC_ADDIU8
:
13574 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13577 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13579 case M16_OPC_SLTIU
:
13580 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13585 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13588 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13591 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13594 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13597 check_insn(ctx
, ISA_MIPS32
);
13599 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13600 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13601 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13602 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13603 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13604 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13605 | (ctx
->opcode
& 0xf)) << 3;
13607 if (ctx
->opcode
& (1 << 7)) {
13608 gen_mips16_save(ctx
, xsregs
, aregs
,
13609 do_ra
, do_s0
, do_s1
,
13612 gen_mips16_restore(ctx
, xsregs
, aregs
,
13613 do_ra
, do_s0
, do_s1
,
13619 generate_exception_end(ctx
, EXCP_RI
);
13624 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13627 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13629 #if defined(TARGET_MIPS64)
13631 check_insn(ctx
, ISA_MIPS3
);
13632 check_mips_64(ctx
);
13633 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13637 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13640 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13643 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13646 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13649 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13652 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13655 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13657 #if defined(TARGET_MIPS64)
13659 check_insn(ctx
, ISA_MIPS3
);
13660 check_mips_64(ctx
);
13661 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13665 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13668 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13671 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13674 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13676 #if defined(TARGET_MIPS64)
13678 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13682 generate_exception_end(ctx
, EXCP_RI
);
13689 static inline bool is_uhi(int sdbbp_code
)
13691 #ifdef CONFIG_USER_ONLY
13694 return semihosting_enabled() && sdbbp_code
== 1;
13698 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
13702 int op
, cnvt_op
, op1
, offset
;
13706 op
= (ctx
->opcode
>> 11) & 0x1f;
13707 sa
= (ctx
->opcode
>> 2) & 0x7;
13708 sa
= sa
== 0 ? 8 : sa
;
13709 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13710 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13711 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13712 op1
= offset
= ctx
->opcode
& 0x1f;
13717 case M16_OPC_ADDIUSP
:
13719 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13721 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13724 case M16_OPC_ADDIUPC
:
13725 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13728 offset
= (ctx
->opcode
& 0x7ff) << 1;
13729 offset
= (int16_t)(offset
<< 4) >> 4;
13730 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13731 /* No delay slot, so just process as a normal instruction */
13734 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13735 offset
= (((ctx
->opcode
& 0x1f) << 21)
13736 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13738 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13739 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13743 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13744 ((int8_t)ctx
->opcode
) << 1, 0);
13745 /* No delay slot, so just process as a normal instruction */
13747 case M16_OPC_BNEQZ
:
13748 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13749 ((int8_t)ctx
->opcode
) << 1, 0);
13750 /* No delay slot, so just process as a normal instruction */
13752 case M16_OPC_SHIFT
:
13753 switch (ctx
->opcode
& 0x3) {
13755 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13758 #if defined(TARGET_MIPS64)
13759 check_insn(ctx
, ISA_MIPS3
);
13760 check_mips_64(ctx
);
13761 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13763 generate_exception_end(ctx
, EXCP_RI
);
13767 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13770 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13774 #if defined(TARGET_MIPS64)
13776 check_insn(ctx
, ISA_MIPS3
);
13777 check_mips_64(ctx
);
13778 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13783 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13785 if ((ctx
->opcode
>> 4) & 1) {
13786 #if defined(TARGET_MIPS64)
13787 check_insn(ctx
, ISA_MIPS3
);
13788 check_mips_64(ctx
);
13789 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13791 generate_exception_end(ctx
, EXCP_RI
);
13794 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13798 case M16_OPC_ADDIU8
:
13800 int16_t imm
= (int8_t) ctx
->opcode
;
13802 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13807 int16_t imm
= (uint8_t) ctx
->opcode
;
13808 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13811 case M16_OPC_SLTIU
:
13813 int16_t imm
= (uint8_t) ctx
->opcode
;
13814 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13821 funct
= (ctx
->opcode
>> 8) & 0x7;
13824 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13825 ((int8_t)ctx
->opcode
) << 1, 0);
13828 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13829 ((int8_t)ctx
->opcode
) << 1, 0);
13832 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13835 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13836 ((int8_t)ctx
->opcode
) << 3);
13839 check_insn(ctx
, ISA_MIPS32
);
13841 int do_ra
= ctx
->opcode
& (1 << 6);
13842 int do_s0
= ctx
->opcode
& (1 << 5);
13843 int do_s1
= ctx
->opcode
& (1 << 4);
13844 int framesize
= ctx
->opcode
& 0xf;
13846 if (framesize
== 0) {
13849 framesize
= framesize
<< 3;
13852 if (ctx
->opcode
& (1 << 7)) {
13853 gen_mips16_save(ctx
, 0, 0,
13854 do_ra
, do_s0
, do_s1
, framesize
);
13856 gen_mips16_restore(ctx
, 0, 0,
13857 do_ra
, do_s0
, do_s1
, framesize
);
13863 int rz
= xlat(ctx
->opcode
& 0x7);
13865 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13866 ((ctx
->opcode
>> 5) & 0x7);
13867 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13871 reg32
= ctx
->opcode
& 0x1f;
13872 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13875 generate_exception_end(ctx
, EXCP_RI
);
13882 int16_t imm
= (uint8_t) ctx
->opcode
;
13884 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13889 int16_t imm
= (uint8_t) ctx
->opcode
;
13890 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13893 #if defined(TARGET_MIPS64)
13895 check_insn(ctx
, ISA_MIPS3
);
13896 check_mips_64(ctx
);
13897 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
13901 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13904 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
13907 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13910 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
13913 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13916 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
13919 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
13921 #if defined (TARGET_MIPS64)
13923 check_insn(ctx
, ISA_MIPS3
);
13924 check_mips_64(ctx
);
13925 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
13929 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13932 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
13935 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
13938 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
13942 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
13945 switch (ctx
->opcode
& 0x3) {
13947 mips32_op
= OPC_ADDU
;
13950 mips32_op
= OPC_SUBU
;
13952 #if defined(TARGET_MIPS64)
13954 mips32_op
= OPC_DADDU
;
13955 check_insn(ctx
, ISA_MIPS3
);
13956 check_mips_64(ctx
);
13959 mips32_op
= OPC_DSUBU
;
13960 check_insn(ctx
, ISA_MIPS3
);
13961 check_mips_64(ctx
);
13965 generate_exception_end(ctx
, EXCP_RI
);
13969 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
13978 int nd
= (ctx
->opcode
>> 7) & 0x1;
13979 int link
= (ctx
->opcode
>> 6) & 0x1;
13980 int ra
= (ctx
->opcode
>> 5) & 0x1;
13983 check_insn(ctx
, ISA_MIPS32
);
13992 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
13997 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
13998 gen_helper_do_semihosting(cpu_env
);
14000 /* XXX: not clear which exception should be raised
14001 * when in debug mode...
14003 check_insn(ctx
, ISA_MIPS32
);
14004 generate_exception_end(ctx
, EXCP_DBp
);
14008 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
14011 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
14014 generate_exception_end(ctx
, EXCP_BREAK
);
14017 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
14020 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
14023 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
14025 #if defined (TARGET_MIPS64)
14027 check_insn(ctx
, ISA_MIPS3
);
14028 check_mips_64(ctx
);
14029 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
14033 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
14036 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
14039 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
14042 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
14045 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
14048 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
14051 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
14054 check_insn(ctx
, ISA_MIPS32
);
14056 case RR_RY_CNVT_ZEB
:
14057 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14059 case RR_RY_CNVT_ZEH
:
14060 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14062 case RR_RY_CNVT_SEB
:
14063 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14065 case RR_RY_CNVT_SEH
:
14066 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14068 #if defined (TARGET_MIPS64)
14069 case RR_RY_CNVT_ZEW
:
14070 check_insn(ctx
, ISA_MIPS64
);
14071 check_mips_64(ctx
);
14072 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14074 case RR_RY_CNVT_SEW
:
14075 check_insn(ctx
, ISA_MIPS64
);
14076 check_mips_64(ctx
);
14077 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14081 generate_exception_end(ctx
, EXCP_RI
);
14086 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14088 #if defined (TARGET_MIPS64)
14090 check_insn(ctx
, ISA_MIPS3
);
14091 check_mips_64(ctx
);
14092 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14095 check_insn(ctx
, ISA_MIPS3
);
14096 check_mips_64(ctx
);
14097 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14100 check_insn(ctx
, ISA_MIPS3
);
14101 check_mips_64(ctx
);
14102 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14105 check_insn(ctx
, ISA_MIPS3
);
14106 check_mips_64(ctx
);
14107 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14111 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14114 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14117 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14120 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14122 #if defined (TARGET_MIPS64)
14124 check_insn(ctx
, ISA_MIPS3
);
14125 check_mips_64(ctx
);
14126 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14129 check_insn(ctx
, ISA_MIPS3
);
14130 check_mips_64(ctx
);
14131 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14134 check_insn(ctx
, ISA_MIPS3
);
14135 check_mips_64(ctx
);
14136 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14139 check_insn(ctx
, ISA_MIPS3
);
14140 check_mips_64(ctx
);
14141 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14145 generate_exception_end(ctx
, EXCP_RI
);
14149 case M16_OPC_EXTEND
:
14150 decode_extended_mips16_opc(env
, ctx
);
14153 #if defined(TARGET_MIPS64)
14155 funct
= (ctx
->opcode
>> 8) & 0x7;
14156 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14160 generate_exception_end(ctx
, EXCP_RI
);
14167 /* microMIPS extension to MIPS32/MIPS64 */
14170 * microMIPS32/microMIPS64 major opcodes
14172 * 1. MIPS Architecture for Programmers Volume II-B:
14173 * The microMIPS32 Instruction Set (Revision 3.05)
14175 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14177 * 2. MIPS Architecture For Programmers Volume II-A:
14178 * The MIPS64 Instruction Set (Revision 3.51)
14208 POOL32S
= 0x16, /* MIPS64 */
14209 DADDIU32
= 0x17, /* MIPS64 */
14238 /* 0x29 is reserved */
14251 /* 0x31 is reserved */
14264 SD32
= 0x36, /* MIPS64 */
14265 LD32
= 0x37, /* MIPS64 */
14267 /* 0x39 is reserved */
14283 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14305 /* POOL32A encoding of minor opcode field */
14308 /* These opcodes are distinguished only by bits 9..6; those bits are
14309 * what are recorded below. */
14346 /* The following can be distinguished by their lower 6 bits. */
14356 /* POOL32AXF encoding of minor opcode field extension */
14359 * 1. MIPS Architecture for Programmers Volume II-B:
14360 * The microMIPS32 Instruction Set (Revision 3.05)
14362 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14364 * 2. MIPS Architecture for Programmers VolumeIV-e:
14365 * The MIPS DSP Application-Specific Extension
14366 * to the microMIPS32 Architecture (Revision 2.34)
14368 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14383 /* begin of microMIPS32 DSP */
14385 /* bits 13..12 for 0x01 */
14391 /* bits 13..12 for 0x2a */
14397 /* bits 13..12 for 0x32 */
14401 /* end of microMIPS32 DSP */
14403 /* bits 15..12 for 0x2c */
14420 /* bits 15..12 for 0x34 */
14428 /* bits 15..12 for 0x3c */
14430 JR
= 0x0, /* alias */
14438 /* bits 15..12 for 0x05 */
14442 /* bits 15..12 for 0x0d */
14454 /* bits 15..12 for 0x15 */
14460 /* bits 15..12 for 0x1d */
14464 /* bits 15..12 for 0x2d */
14469 /* bits 15..12 for 0x35 */
14476 /* POOL32B encoding of minor opcode field (bits 15..12) */
14492 /* POOL32C encoding of minor opcode field (bits 15..12) */
14513 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14526 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14539 /* POOL32F encoding of minor opcode field (bits 5..0) */
14542 /* These are the bit 7..6 values */
14551 /* These are the bit 8..6 values */
14576 MOVZ_FMT_05
= 0x05,
14610 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14617 /* POOL32Fxf encoding of minor opcode extension field */
14655 /* POOL32I encoding of minor opcode field (bits 25..21) */
14685 /* These overlap and are distinguished by bit16 of the instruction */
14694 /* POOL16A encoding of minor opcode field */
14701 /* POOL16B encoding of minor opcode field */
14708 /* POOL16C encoding of minor opcode field */
14728 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14752 /* POOL16D encoding of minor opcode field */
14759 /* POOL16E encoding of minor opcode field */
14766 static int mmreg (int r
)
14768 static const int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
14773 /* Used for 16-bit store instructions. */
14774 static int mmreg2 (int r
)
14776 static const int map
[] = { 0, 17, 2, 3, 4, 5, 6, 7 };
14781 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14782 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14783 #define uMIPS_RS2(op) uMIPS_RS(op)
14784 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14785 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14786 #define uMIPS_RS5(op) (op & 0x1f)
14788 /* Signed immediate */
14789 #define SIMM(op, start, width) \
14790 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14793 /* Zero-extended immediate */
14794 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14796 static void gen_addiur1sp(DisasContext
*ctx
)
14798 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14800 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14803 static void gen_addiur2(DisasContext
*ctx
)
14805 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14806 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14807 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14809 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14812 static void gen_addiusp(DisasContext
*ctx
)
14814 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14817 if (encoded
<= 1) {
14818 decoded
= 256 + encoded
;
14819 } else if (encoded
<= 255) {
14821 } else if (encoded
<= 509) {
14822 decoded
= encoded
- 512;
14824 decoded
= encoded
- 768;
14827 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14830 static void gen_addius5(DisasContext
*ctx
)
14832 int imm
= SIMM(ctx
->opcode
, 1, 4);
14833 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14835 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14838 static void gen_andi16(DisasContext
*ctx
)
14840 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14841 31, 32, 63, 64, 255, 32768, 65535 };
14842 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14843 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14844 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14846 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14849 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
14850 int base
, int16_t offset
)
14855 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14856 generate_exception_end(ctx
, EXCP_RI
);
14860 t0
= tcg_temp_new();
14862 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14864 t1
= tcg_const_tl(reglist
);
14865 t2
= tcg_const_i32(ctx
->mem_idx
);
14867 save_cpu_state(ctx
, 1);
14870 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14873 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14875 #ifdef TARGET_MIPS64
14877 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14880 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14886 tcg_temp_free_i32(t2
);
14890 static void gen_pool16c_insn(DisasContext
*ctx
)
14892 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14893 int rs
= mmreg(ctx
->opcode
& 0x7);
14895 switch (((ctx
->opcode
) >> 4) & 0x3f) {
14900 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
14906 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
14912 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
14918 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
14925 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14926 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14928 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
14937 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
14938 int offset
= ZIMM(ctx
->opcode
, 0, 4);
14940 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
14947 int reg
= ctx
->opcode
& 0x1f;
14949 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
14955 int reg
= ctx
->opcode
& 0x1f;
14956 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
14957 /* Let normal delay slot handling in our caller take us
14958 to the branch target. */
14963 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
14964 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14968 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
14969 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14973 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
14977 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
14980 generate_exception_end(ctx
, EXCP_BREAK
);
14983 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
14984 gen_helper_do_semihosting(cpu_env
);
14986 /* XXX: not clear which exception should be raised
14987 * when in debug mode...
14989 check_insn(ctx
, ISA_MIPS32
);
14990 generate_exception_end(ctx
, EXCP_DBp
);
14993 case JRADDIUSP
+ 0:
14994 case JRADDIUSP
+ 1:
14996 int imm
= ZIMM(ctx
->opcode
, 0, 5);
14997 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
14998 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
14999 /* Let normal delay slot handling in our caller take us
15000 to the branch target. */
15004 generate_exception_end(ctx
, EXCP_RI
);
15009 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
15012 int rd
, rs
, re
, rt
;
15013 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
15014 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
15015 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
15016 rd
= rd_enc
[enc_dest
];
15017 re
= re_enc
[enc_dest
];
15018 rs
= rs_rt_enc
[enc_rs
];
15019 rt
= rs_rt_enc
[enc_rt
];
15021 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
15023 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
15026 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
15028 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
15032 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
15034 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
15035 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
15037 switch (ctx
->opcode
& 0xf) {
15039 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
15042 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
15046 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15047 int offset
= extract32(ctx
->opcode
, 4, 4);
15048 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
15051 case R6_JRC16
: /* JRCADDIUSP */
15052 if ((ctx
->opcode
>> 4) & 1) {
15054 int imm
= extract32(ctx
->opcode
, 5, 5);
15055 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15056 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15059 rs
= extract32(ctx
->opcode
, 5, 5);
15060 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15072 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15073 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15074 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15075 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15079 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15082 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15086 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15087 int offset
= extract32(ctx
->opcode
, 4, 4);
15088 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15091 case JALRC16
: /* BREAK16, SDBBP16 */
15092 switch (ctx
->opcode
& 0x3f) {
15094 case JALRC16
+ 0x20:
15096 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15101 generate_exception(ctx
, EXCP_BREAK
);
15105 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15106 gen_helper_do_semihosting(cpu_env
);
15108 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15109 generate_exception(ctx
, EXCP_RI
);
15111 generate_exception(ctx
, EXCP_DBp
);
15118 generate_exception(ctx
, EXCP_RI
);
15123 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
15125 TCGv t0
= tcg_temp_new();
15126 TCGv t1
= tcg_temp_new();
15128 gen_load_gpr(t0
, base
);
15131 gen_load_gpr(t1
, index
);
15132 tcg_gen_shli_tl(t1
, t1
, 2);
15133 gen_op_addr_add(ctx
, t0
, t1
, t0
);
15136 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15137 gen_store_gpr(t1
, rd
);
15143 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
15144 int base
, int16_t offset
)
15148 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
15149 generate_exception_end(ctx
, EXCP_RI
);
15153 t0
= tcg_temp_new();
15154 t1
= tcg_temp_new();
15156 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15161 generate_exception_end(ctx
, EXCP_RI
);
15164 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15165 gen_store_gpr(t1
, rd
);
15166 tcg_gen_movi_tl(t1
, 4);
15167 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15168 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15169 gen_store_gpr(t1
, rd
+1);
15172 gen_load_gpr(t1
, rd
);
15173 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15174 tcg_gen_movi_tl(t1
, 4);
15175 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15176 gen_load_gpr(t1
, rd
+1);
15177 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15179 #ifdef TARGET_MIPS64
15182 generate_exception_end(ctx
, EXCP_RI
);
15185 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15186 gen_store_gpr(t1
, rd
);
15187 tcg_gen_movi_tl(t1
, 8);
15188 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15189 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15190 gen_store_gpr(t1
, rd
+1);
15193 gen_load_gpr(t1
, rd
);
15194 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15195 tcg_gen_movi_tl(t1
, 8);
15196 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15197 gen_load_gpr(t1
, rd
+1);
15198 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15206 static void gen_sync(int stype
)
15208 TCGBar tcg_mo
= TCG_BAR_SC
;
15211 case 0x4: /* SYNC_WMB */
15212 tcg_mo
|= TCG_MO_ST_ST
;
15214 case 0x10: /* SYNC_MB */
15215 tcg_mo
|= TCG_MO_ALL
;
15217 case 0x11: /* SYNC_ACQUIRE */
15218 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
15220 case 0x12: /* SYNC_RELEASE */
15221 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
15223 case 0x13: /* SYNC_RMB */
15224 tcg_mo
|= TCG_MO_LD_LD
;
15227 tcg_mo
|= TCG_MO_ALL
;
15231 tcg_gen_mb(tcg_mo
);
15234 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
15236 int extension
= (ctx
->opcode
>> 6) & 0x3f;
15237 int minor
= (ctx
->opcode
>> 12) & 0xf;
15238 uint32_t mips32_op
;
15240 switch (extension
) {
15242 mips32_op
= OPC_TEQ
;
15245 mips32_op
= OPC_TGE
;
15248 mips32_op
= OPC_TGEU
;
15251 mips32_op
= OPC_TLT
;
15254 mips32_op
= OPC_TLTU
;
15257 mips32_op
= OPC_TNE
;
15259 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15261 #ifndef CONFIG_USER_ONLY
15264 check_cp0_enabled(ctx
);
15266 /* Treat as NOP. */
15269 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15273 check_cp0_enabled(ctx
);
15275 TCGv t0
= tcg_temp_new();
15277 gen_load_gpr(t0
, rt
);
15278 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15284 switch (minor
& 3) {
15286 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15289 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15292 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15295 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15298 goto pool32axf_invalid
;
15302 switch (minor
& 3) {
15304 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15307 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15310 goto pool32axf_invalid
;
15316 check_insn(ctx
, ISA_MIPS32R6
);
15317 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15320 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15323 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15326 mips32_op
= OPC_CLO
;
15329 mips32_op
= OPC_CLZ
;
15331 check_insn(ctx
, ISA_MIPS32
);
15332 gen_cl(ctx
, mips32_op
, rt
, rs
);
15335 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15336 gen_rdhwr(ctx
, rt
, rs
, 0);
15339 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15342 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15343 mips32_op
= OPC_MULT
;
15346 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15347 mips32_op
= OPC_MULTU
;
15350 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15351 mips32_op
= OPC_DIV
;
15354 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15355 mips32_op
= OPC_DIVU
;
15358 check_insn(ctx
, ISA_MIPS32
);
15359 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15362 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15363 mips32_op
= OPC_MADD
;
15366 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15367 mips32_op
= OPC_MADDU
;
15370 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15371 mips32_op
= OPC_MSUB
;
15374 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15375 mips32_op
= OPC_MSUBU
;
15377 check_insn(ctx
, ISA_MIPS32
);
15378 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15381 goto pool32axf_invalid
;
15392 generate_exception_err(ctx
, EXCP_CpU
, 2);
15395 goto pool32axf_invalid
;
15400 case JALR
: /* JALRC */
15401 case JALR_HB
: /* JALRC_HB */
15402 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15403 /* JALRC, JALRC_HB */
15404 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15406 /* JALR, JALR_HB */
15407 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15408 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15413 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15414 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15415 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15418 goto pool32axf_invalid
;
15424 check_cp0_enabled(ctx
);
15425 check_insn(ctx
, ISA_MIPS32R2
);
15426 gen_load_srsgpr(rs
, rt
);
15429 check_cp0_enabled(ctx
);
15430 check_insn(ctx
, ISA_MIPS32R2
);
15431 gen_store_srsgpr(rs
, rt
);
15434 goto pool32axf_invalid
;
15437 #ifndef CONFIG_USER_ONLY
15441 mips32_op
= OPC_TLBP
;
15444 mips32_op
= OPC_TLBR
;
15447 mips32_op
= OPC_TLBWI
;
15450 mips32_op
= OPC_TLBWR
;
15453 mips32_op
= OPC_TLBINV
;
15456 mips32_op
= OPC_TLBINVF
;
15459 mips32_op
= OPC_WAIT
;
15462 mips32_op
= OPC_DERET
;
15465 mips32_op
= OPC_ERET
;
15467 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15470 goto pool32axf_invalid
;
15476 check_cp0_enabled(ctx
);
15478 TCGv t0
= tcg_temp_new();
15480 save_cpu_state(ctx
, 1);
15481 gen_helper_di(t0
, cpu_env
);
15482 gen_store_gpr(t0
, rs
);
15483 /* Stop translation as we may have switched the execution mode */
15484 ctx
->base
.is_jmp
= DISAS_STOP
;
15489 check_cp0_enabled(ctx
);
15491 TCGv t0
= tcg_temp_new();
15493 save_cpu_state(ctx
, 1);
15494 gen_helper_ei(t0
, cpu_env
);
15495 gen_store_gpr(t0
, rs
);
15496 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15497 of translated code to check for pending interrupts. */
15498 gen_save_pc(ctx
->base
.pc_next
+ 4);
15499 ctx
->base
.is_jmp
= DISAS_EXIT
;
15504 goto pool32axf_invalid
;
15511 gen_sync(extract32(ctx
->opcode
, 16, 5));
15514 generate_exception_end(ctx
, EXCP_SYSCALL
);
15517 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15518 gen_helper_do_semihosting(cpu_env
);
15520 check_insn(ctx
, ISA_MIPS32
);
15521 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15522 generate_exception_end(ctx
, EXCP_RI
);
15524 generate_exception_end(ctx
, EXCP_DBp
);
15529 goto pool32axf_invalid
;
15533 switch (minor
& 3) {
15535 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15538 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15541 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15544 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15547 goto pool32axf_invalid
;
15551 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15554 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15557 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15560 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15563 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15566 goto pool32axf_invalid
;
15571 MIPS_INVAL("pool32axf");
15572 generate_exception_end(ctx
, EXCP_RI
);
15577 /* Values for microMIPS fmt field. Variable-width, depending on which
15578 formats the instruction supports. */
15597 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15599 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15600 uint32_t mips32_op
;
15602 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15603 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15604 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15606 switch (extension
) {
15607 case FLOAT_1BIT_FMT(CFC1
, 0):
15608 mips32_op
= OPC_CFC1
;
15610 case FLOAT_1BIT_FMT(CTC1
, 0):
15611 mips32_op
= OPC_CTC1
;
15613 case FLOAT_1BIT_FMT(MFC1
, 0):
15614 mips32_op
= OPC_MFC1
;
15616 case FLOAT_1BIT_FMT(MTC1
, 0):
15617 mips32_op
= OPC_MTC1
;
15619 case FLOAT_1BIT_FMT(MFHC1
, 0):
15620 mips32_op
= OPC_MFHC1
;
15622 case FLOAT_1BIT_FMT(MTHC1
, 0):
15623 mips32_op
= OPC_MTHC1
;
15625 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15628 /* Reciprocal square root */
15629 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15630 mips32_op
= OPC_RSQRT_S
;
15632 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15633 mips32_op
= OPC_RSQRT_D
;
15637 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15638 mips32_op
= OPC_SQRT_S
;
15640 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15641 mips32_op
= OPC_SQRT_D
;
15645 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15646 mips32_op
= OPC_RECIP_S
;
15648 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15649 mips32_op
= OPC_RECIP_D
;
15653 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15654 mips32_op
= OPC_FLOOR_L_S
;
15656 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15657 mips32_op
= OPC_FLOOR_L_D
;
15659 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15660 mips32_op
= OPC_FLOOR_W_S
;
15662 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15663 mips32_op
= OPC_FLOOR_W_D
;
15667 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15668 mips32_op
= OPC_CEIL_L_S
;
15670 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15671 mips32_op
= OPC_CEIL_L_D
;
15673 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15674 mips32_op
= OPC_CEIL_W_S
;
15676 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15677 mips32_op
= OPC_CEIL_W_D
;
15681 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15682 mips32_op
= OPC_TRUNC_L_S
;
15684 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15685 mips32_op
= OPC_TRUNC_L_D
;
15687 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15688 mips32_op
= OPC_TRUNC_W_S
;
15690 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15691 mips32_op
= OPC_TRUNC_W_D
;
15695 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15696 mips32_op
= OPC_ROUND_L_S
;
15698 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15699 mips32_op
= OPC_ROUND_L_D
;
15701 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15702 mips32_op
= OPC_ROUND_W_S
;
15704 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15705 mips32_op
= OPC_ROUND_W_D
;
15708 /* Integer to floating-point conversion */
15709 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15710 mips32_op
= OPC_CVT_L_S
;
15712 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15713 mips32_op
= OPC_CVT_L_D
;
15715 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15716 mips32_op
= OPC_CVT_W_S
;
15718 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15719 mips32_op
= OPC_CVT_W_D
;
15722 /* Paired-foo conversions */
15723 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15724 mips32_op
= OPC_CVT_S_PL
;
15726 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15727 mips32_op
= OPC_CVT_S_PU
;
15729 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15730 mips32_op
= OPC_CVT_PW_PS
;
15732 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15733 mips32_op
= OPC_CVT_PS_PW
;
15736 /* Floating-point moves */
15737 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15738 mips32_op
= OPC_MOV_S
;
15740 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15741 mips32_op
= OPC_MOV_D
;
15743 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15744 mips32_op
= OPC_MOV_PS
;
15747 /* Absolute value */
15748 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15749 mips32_op
= OPC_ABS_S
;
15751 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15752 mips32_op
= OPC_ABS_D
;
15754 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15755 mips32_op
= OPC_ABS_PS
;
15759 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15760 mips32_op
= OPC_NEG_S
;
15762 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15763 mips32_op
= OPC_NEG_D
;
15765 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15766 mips32_op
= OPC_NEG_PS
;
15769 /* Reciprocal square root step */
15770 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15771 mips32_op
= OPC_RSQRT1_S
;
15773 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15774 mips32_op
= OPC_RSQRT1_D
;
15776 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15777 mips32_op
= OPC_RSQRT1_PS
;
15780 /* Reciprocal step */
15781 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15782 mips32_op
= OPC_RECIP1_S
;
15784 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15785 mips32_op
= OPC_RECIP1_S
;
15787 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15788 mips32_op
= OPC_RECIP1_PS
;
15791 /* Conversions from double */
15792 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15793 mips32_op
= OPC_CVT_D_S
;
15795 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15796 mips32_op
= OPC_CVT_D_W
;
15798 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15799 mips32_op
= OPC_CVT_D_L
;
15802 /* Conversions from single */
15803 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15804 mips32_op
= OPC_CVT_S_D
;
15806 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15807 mips32_op
= OPC_CVT_S_W
;
15809 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15810 mips32_op
= OPC_CVT_S_L
;
15812 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15815 /* Conditional moves on floating-point codes */
15816 case COND_FLOAT_MOV(MOVT
, 0):
15817 case COND_FLOAT_MOV(MOVT
, 1):
15818 case COND_FLOAT_MOV(MOVT
, 2):
15819 case COND_FLOAT_MOV(MOVT
, 3):
15820 case COND_FLOAT_MOV(MOVT
, 4):
15821 case COND_FLOAT_MOV(MOVT
, 5):
15822 case COND_FLOAT_MOV(MOVT
, 6):
15823 case COND_FLOAT_MOV(MOVT
, 7):
15824 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15825 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15827 case COND_FLOAT_MOV(MOVF
, 0):
15828 case COND_FLOAT_MOV(MOVF
, 1):
15829 case COND_FLOAT_MOV(MOVF
, 2):
15830 case COND_FLOAT_MOV(MOVF
, 3):
15831 case COND_FLOAT_MOV(MOVF
, 4):
15832 case COND_FLOAT_MOV(MOVF
, 5):
15833 case COND_FLOAT_MOV(MOVF
, 6):
15834 case COND_FLOAT_MOV(MOVF
, 7):
15835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15836 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
15839 MIPS_INVAL("pool32fxf");
15840 generate_exception_end(ctx
, EXCP_RI
);
15845 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15849 int rt
, rs
, rd
, rr
;
15851 uint32_t op
, minor
, minor2
, mips32_op
;
15852 uint32_t cond
, fmt
, cc
;
15854 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15855 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15857 rt
= (ctx
->opcode
>> 21) & 0x1f;
15858 rs
= (ctx
->opcode
>> 16) & 0x1f;
15859 rd
= (ctx
->opcode
>> 11) & 0x1f;
15860 rr
= (ctx
->opcode
>> 6) & 0x1f;
15861 imm
= (int16_t) ctx
->opcode
;
15863 op
= (ctx
->opcode
>> 26) & 0x3f;
15866 minor
= ctx
->opcode
& 0x3f;
15869 minor
= (ctx
->opcode
>> 6) & 0xf;
15872 mips32_op
= OPC_SLL
;
15875 mips32_op
= OPC_SRA
;
15878 mips32_op
= OPC_SRL
;
15881 mips32_op
= OPC_ROTR
;
15883 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15886 check_insn(ctx
, ISA_MIPS32R6
);
15887 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15890 check_insn(ctx
, ISA_MIPS32R6
);
15891 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15894 check_insn(ctx
, ISA_MIPS32R6
);
15895 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
15898 goto pool32a_invalid
;
15902 minor
= (ctx
->opcode
>> 6) & 0xf;
15906 mips32_op
= OPC_ADD
;
15909 mips32_op
= OPC_ADDU
;
15912 mips32_op
= OPC_SUB
;
15915 mips32_op
= OPC_SUBU
;
15918 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15919 mips32_op
= OPC_MUL
;
15921 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
15925 mips32_op
= OPC_SLLV
;
15928 mips32_op
= OPC_SRLV
;
15931 mips32_op
= OPC_SRAV
;
15934 mips32_op
= OPC_ROTRV
;
15936 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
15938 /* Logical operations */
15940 mips32_op
= OPC_AND
;
15943 mips32_op
= OPC_OR
;
15946 mips32_op
= OPC_NOR
;
15949 mips32_op
= OPC_XOR
;
15951 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
15953 /* Set less than */
15955 mips32_op
= OPC_SLT
;
15958 mips32_op
= OPC_SLTU
;
15960 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
15963 goto pool32a_invalid
;
15967 minor
= (ctx
->opcode
>> 6) & 0xf;
15969 /* Conditional moves */
15970 case MOVN
: /* MUL */
15971 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15973 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
15976 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
15979 case MOVZ
: /* MUH */
15980 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15982 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
15985 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
15989 check_insn(ctx
, ISA_MIPS32R6
);
15990 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
15993 check_insn(ctx
, ISA_MIPS32R6
);
15994 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
15996 case LWXS
: /* DIV */
15997 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15999 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
16002 gen_ldxs(ctx
, rs
, rt
, rd
);
16006 check_insn(ctx
, ISA_MIPS32R6
);
16007 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
16010 check_insn(ctx
, ISA_MIPS32R6
);
16011 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
16014 check_insn(ctx
, ISA_MIPS32R6
);
16015 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
16018 goto pool32a_invalid
;
16022 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
16025 check_insn(ctx
, ISA_MIPS32R6
);
16026 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
16027 extract32(ctx
->opcode
, 9, 2));
16030 check_insn(ctx
, ISA_MIPS32R6
);
16031 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
16034 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
16037 gen_pool32axf(env
, ctx
, rt
, rs
);
16040 generate_exception_end(ctx
, EXCP_BREAK
);
16043 check_insn(ctx
, ISA_MIPS32R6
);
16044 generate_exception_end(ctx
, EXCP_RI
);
16048 MIPS_INVAL("pool32a");
16049 generate_exception_end(ctx
, EXCP_RI
);
16054 minor
= (ctx
->opcode
>> 12) & 0xf;
16057 check_cp0_enabled(ctx
);
16058 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16059 gen_cache_operation(ctx
, rt
, rs
, imm
);
16064 /* COP2: Not implemented. */
16065 generate_exception_err(ctx
, EXCP_CpU
, 2);
16067 #ifdef TARGET_MIPS64
16070 check_insn(ctx
, ISA_MIPS3
);
16071 check_mips_64(ctx
);
16076 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16078 #ifdef TARGET_MIPS64
16081 check_insn(ctx
, ISA_MIPS3
);
16082 check_mips_64(ctx
);
16087 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16090 MIPS_INVAL("pool32b");
16091 generate_exception_end(ctx
, EXCP_RI
);
16096 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16097 minor
= ctx
->opcode
& 0x3f;
16098 check_cp1_enabled(ctx
);
16101 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16102 mips32_op
= OPC_ALNV_PS
;
16105 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16106 mips32_op
= OPC_MADD_S
;
16109 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16110 mips32_op
= OPC_MADD_D
;
16113 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16114 mips32_op
= OPC_MADD_PS
;
16117 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16118 mips32_op
= OPC_MSUB_S
;
16121 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16122 mips32_op
= OPC_MSUB_D
;
16125 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16126 mips32_op
= OPC_MSUB_PS
;
16129 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16130 mips32_op
= OPC_NMADD_S
;
16133 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16134 mips32_op
= OPC_NMADD_D
;
16137 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16138 mips32_op
= OPC_NMADD_PS
;
16141 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16142 mips32_op
= OPC_NMSUB_S
;
16145 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16146 mips32_op
= OPC_NMSUB_D
;
16149 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16150 mips32_op
= OPC_NMSUB_PS
;
16152 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16154 case CABS_COND_FMT
:
16155 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16156 cond
= (ctx
->opcode
>> 6) & 0xf;
16157 cc
= (ctx
->opcode
>> 13) & 0x7;
16158 fmt
= (ctx
->opcode
>> 10) & 0x3;
16161 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16164 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16167 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16170 goto pool32f_invalid
;
16174 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16175 cond
= (ctx
->opcode
>> 6) & 0xf;
16176 cc
= (ctx
->opcode
>> 13) & 0x7;
16177 fmt
= (ctx
->opcode
>> 10) & 0x3;
16180 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16183 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16186 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16189 goto pool32f_invalid
;
16193 check_insn(ctx
, ISA_MIPS32R6
);
16194 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16197 check_insn(ctx
, ISA_MIPS32R6
);
16198 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16201 gen_pool32fxf(ctx
, rt
, rs
);
16205 switch ((ctx
->opcode
>> 6) & 0x7) {
16207 mips32_op
= OPC_PLL_PS
;
16210 mips32_op
= OPC_PLU_PS
;
16213 mips32_op
= OPC_PUL_PS
;
16216 mips32_op
= OPC_PUU_PS
;
16219 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16220 mips32_op
= OPC_CVT_PS_S
;
16222 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16225 goto pool32f_invalid
;
16229 check_insn(ctx
, ISA_MIPS32R6
);
16230 switch ((ctx
->opcode
>> 9) & 0x3) {
16232 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16235 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16238 goto pool32f_invalid
;
16243 switch ((ctx
->opcode
>> 6) & 0x7) {
16245 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16246 mips32_op
= OPC_LWXC1
;
16249 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16250 mips32_op
= OPC_SWXC1
;
16253 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16254 mips32_op
= OPC_LDXC1
;
16257 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16258 mips32_op
= OPC_SDXC1
;
16261 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16262 mips32_op
= OPC_LUXC1
;
16265 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16266 mips32_op
= OPC_SUXC1
;
16268 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16271 goto pool32f_invalid
;
16275 check_insn(ctx
, ISA_MIPS32R6
);
16276 switch ((ctx
->opcode
>> 9) & 0x3) {
16278 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16281 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16284 goto pool32f_invalid
;
16289 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16290 fmt
= (ctx
->opcode
>> 9) & 0x3;
16291 switch ((ctx
->opcode
>> 6) & 0x7) {
16295 mips32_op
= OPC_RSQRT2_S
;
16298 mips32_op
= OPC_RSQRT2_D
;
16301 mips32_op
= OPC_RSQRT2_PS
;
16304 goto pool32f_invalid
;
16310 mips32_op
= OPC_RECIP2_S
;
16313 mips32_op
= OPC_RECIP2_D
;
16316 mips32_op
= OPC_RECIP2_PS
;
16319 goto pool32f_invalid
;
16323 mips32_op
= OPC_ADDR_PS
;
16326 mips32_op
= OPC_MULR_PS
;
16328 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16331 goto pool32f_invalid
;
16335 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16336 cc
= (ctx
->opcode
>> 13) & 0x7;
16337 fmt
= (ctx
->opcode
>> 9) & 0x3;
16338 switch ((ctx
->opcode
>> 6) & 0x7) {
16339 case MOVF_FMT
: /* RINT_FMT */
16340 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16344 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16347 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16350 goto pool32f_invalid
;
16356 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16359 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16363 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16366 goto pool32f_invalid
;
16370 case MOVT_FMT
: /* CLASS_FMT */
16371 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16375 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16378 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16381 goto pool32f_invalid
;
16387 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16390 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16394 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16397 goto pool32f_invalid
;
16402 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16405 goto pool32f_invalid
;
16408 #define FINSN_3ARG_SDPS(prfx) \
16409 switch ((ctx->opcode >> 8) & 0x3) { \
16411 mips32_op = OPC_##prfx##_S; \
16414 mips32_op = OPC_##prfx##_D; \
16416 case FMT_SDPS_PS: \
16418 mips32_op = OPC_##prfx##_PS; \
16421 goto pool32f_invalid; \
16424 check_insn(ctx
, ISA_MIPS32R6
);
16425 switch ((ctx
->opcode
>> 9) & 0x3) {
16427 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16430 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16433 goto pool32f_invalid
;
16437 check_insn(ctx
, ISA_MIPS32R6
);
16438 switch ((ctx
->opcode
>> 9) & 0x3) {
16440 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16443 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16446 goto pool32f_invalid
;
16450 /* regular FP ops */
16451 switch ((ctx
->opcode
>> 6) & 0x3) {
16453 FINSN_3ARG_SDPS(ADD
);
16456 FINSN_3ARG_SDPS(SUB
);
16459 FINSN_3ARG_SDPS(MUL
);
16462 fmt
= (ctx
->opcode
>> 8) & 0x3;
16464 mips32_op
= OPC_DIV_D
;
16465 } else if (fmt
== 0) {
16466 mips32_op
= OPC_DIV_S
;
16468 goto pool32f_invalid
;
16472 goto pool32f_invalid
;
16477 switch ((ctx
->opcode
>> 6) & 0x7) {
16478 case MOVN_FMT
: /* SELEQZ_FMT */
16479 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16481 switch ((ctx
->opcode
>> 9) & 0x3) {
16483 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16486 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16489 goto pool32f_invalid
;
16493 FINSN_3ARG_SDPS(MOVN
);
16497 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16498 FINSN_3ARG_SDPS(MOVN
);
16500 case MOVZ_FMT
: /* SELNEZ_FMT */
16501 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16503 switch ((ctx
->opcode
>> 9) & 0x3) {
16505 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16508 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16511 goto pool32f_invalid
;
16515 FINSN_3ARG_SDPS(MOVZ
);
16519 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16520 FINSN_3ARG_SDPS(MOVZ
);
16523 check_insn(ctx
, ISA_MIPS32R6
);
16524 switch ((ctx
->opcode
>> 9) & 0x3) {
16526 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16529 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16532 goto pool32f_invalid
;
16536 check_insn(ctx
, ISA_MIPS32R6
);
16537 switch ((ctx
->opcode
>> 9) & 0x3) {
16539 mips32_op
= OPC_MADDF_S
;
16542 mips32_op
= OPC_MADDF_D
;
16545 goto pool32f_invalid
;
16549 check_insn(ctx
, ISA_MIPS32R6
);
16550 switch ((ctx
->opcode
>> 9) & 0x3) {
16552 mips32_op
= OPC_MSUBF_S
;
16555 mips32_op
= OPC_MSUBF_D
;
16558 goto pool32f_invalid
;
16562 goto pool32f_invalid
;
16566 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16570 MIPS_INVAL("pool32f");
16571 generate_exception_end(ctx
, EXCP_RI
);
16575 generate_exception_err(ctx
, EXCP_CpU
, 1);
16579 minor
= (ctx
->opcode
>> 21) & 0x1f;
16582 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16583 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16586 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16587 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16588 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16591 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16592 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16593 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16596 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16597 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16600 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16601 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16602 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16605 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16606 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16607 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16610 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16611 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16614 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16615 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16619 case TLTI
: /* BC1EQZC */
16620 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16622 check_cp1_enabled(ctx
);
16623 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16626 mips32_op
= OPC_TLTI
;
16630 case TGEI
: /* BC1NEZC */
16631 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16633 check_cp1_enabled(ctx
);
16634 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16637 mips32_op
= OPC_TGEI
;
16642 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16643 mips32_op
= OPC_TLTIU
;
16646 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16647 mips32_op
= OPC_TGEIU
;
16649 case TNEI
: /* SYNCI */
16650 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16652 /* Break the TB to be able to sync copied instructions
16654 ctx
->base
.is_jmp
= DISAS_STOP
;
16657 mips32_op
= OPC_TNEI
;
16662 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16663 mips32_op
= OPC_TEQI
;
16665 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16670 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16671 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16672 4, rs
, 0, imm
<< 1, 0);
16673 /* Compact branches don't have a delay slot, so just let
16674 the normal delay slot handling take us to the branch
16678 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16679 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16682 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16683 /* Break the TB to be able to sync copied instructions
16685 ctx
->base
.is_jmp
= DISAS_STOP
;
16689 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16690 /* COP2: Not implemented. */
16691 generate_exception_err(ctx
, EXCP_CpU
, 2);
16694 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16695 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16698 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16699 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16702 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16703 mips32_op
= OPC_BC1FANY4
;
16706 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16707 mips32_op
= OPC_BC1TANY4
;
16710 check_insn(ctx
, ASE_MIPS3D
);
16713 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16714 check_cp1_enabled(ctx
);
16715 gen_compute_branch1(ctx
, mips32_op
,
16716 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16718 generate_exception_err(ctx
, EXCP_CpU
, 1);
16723 /* MIPS DSP: not implemented */
16726 MIPS_INVAL("pool32i");
16727 generate_exception_end(ctx
, EXCP_RI
);
16732 minor
= (ctx
->opcode
>> 12) & 0xf;
16733 offset
= sextract32(ctx
->opcode
, 0,
16734 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16737 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16738 mips32_op
= OPC_LWL
;
16741 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16742 mips32_op
= OPC_SWL
;
16745 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16746 mips32_op
= OPC_LWR
;
16749 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16750 mips32_op
= OPC_SWR
;
16752 #if defined(TARGET_MIPS64)
16754 check_insn(ctx
, ISA_MIPS3
);
16755 check_mips_64(ctx
);
16756 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16757 mips32_op
= OPC_LDL
;
16760 check_insn(ctx
, ISA_MIPS3
);
16761 check_mips_64(ctx
);
16762 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16763 mips32_op
= OPC_SDL
;
16766 check_insn(ctx
, ISA_MIPS3
);
16767 check_mips_64(ctx
);
16768 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16769 mips32_op
= OPC_LDR
;
16772 check_insn(ctx
, ISA_MIPS3
);
16773 check_mips_64(ctx
);
16774 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16775 mips32_op
= OPC_SDR
;
16778 check_insn(ctx
, ISA_MIPS3
);
16779 check_mips_64(ctx
);
16780 mips32_op
= OPC_LWU
;
16783 check_insn(ctx
, ISA_MIPS3
);
16784 check_mips_64(ctx
);
16785 mips32_op
= OPC_LLD
;
16789 mips32_op
= OPC_LL
;
16792 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16795 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16798 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
16800 #if defined(TARGET_MIPS64)
16802 check_insn(ctx
, ISA_MIPS3
);
16803 check_mips_64(ctx
);
16804 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
16809 MIPS_INVAL("pool32c ld-eva");
16810 generate_exception_end(ctx
, EXCP_RI
);
16813 check_cp0_enabled(ctx
);
16815 minor2
= (ctx
->opcode
>> 9) & 0x7;
16816 offset
= sextract32(ctx
->opcode
, 0, 9);
16819 mips32_op
= OPC_LBUE
;
16822 mips32_op
= OPC_LHUE
;
16825 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16826 mips32_op
= OPC_LWLE
;
16829 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16830 mips32_op
= OPC_LWRE
;
16833 mips32_op
= OPC_LBE
;
16836 mips32_op
= OPC_LHE
;
16839 mips32_op
= OPC_LLE
;
16842 mips32_op
= OPC_LWE
;
16848 MIPS_INVAL("pool32c st-eva");
16849 generate_exception_end(ctx
, EXCP_RI
);
16852 check_cp0_enabled(ctx
);
16854 minor2
= (ctx
->opcode
>> 9) & 0x7;
16855 offset
= sextract32(ctx
->opcode
, 0, 9);
16858 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16859 mips32_op
= OPC_SWLE
;
16862 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16863 mips32_op
= OPC_SWRE
;
16866 /* Treat as no-op */
16867 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16868 /* hint codes 24-31 are reserved and signal RI */
16869 generate_exception(ctx
, EXCP_RI
);
16873 /* Treat as no-op */
16874 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16875 gen_cache_operation(ctx
, rt
, rs
, offset
);
16879 mips32_op
= OPC_SBE
;
16882 mips32_op
= OPC_SHE
;
16885 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, offset
);
16888 mips32_op
= OPC_SWE
;
16893 /* Treat as no-op */
16894 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16895 /* hint codes 24-31 are reserved and signal RI */
16896 generate_exception(ctx
, EXCP_RI
);
16900 MIPS_INVAL("pool32c");
16901 generate_exception_end(ctx
, EXCP_RI
);
16905 case ADDI32
: /* AUI, LUI */
16906 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16908 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
16911 mips32_op
= OPC_ADDI
;
16916 mips32_op
= OPC_ADDIU
;
16918 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16921 /* Logical operations */
16923 mips32_op
= OPC_ORI
;
16926 mips32_op
= OPC_XORI
;
16929 mips32_op
= OPC_ANDI
;
16931 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16934 /* Set less than immediate */
16936 mips32_op
= OPC_SLTI
;
16939 mips32_op
= OPC_SLTIU
;
16941 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
16944 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16945 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
16946 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
16947 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16949 case JALS32
: /* BOVC, BEQC, BEQZALC */
16950 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16953 mips32_op
= OPC_BOVC
;
16954 } else if (rs
< rt
&& rs
== 0) {
16956 mips32_op
= OPC_BEQZALC
;
16959 mips32_op
= OPC_BEQC
;
16961 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
16964 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
16965 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
16966 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16969 case BEQ32
: /* BC */
16970 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16972 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
16973 sextract32(ctx
->opcode
<< 1, 0, 27));
16976 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
16979 case BNE32
: /* BALC */
16980 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16982 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
16983 sextract32(ctx
->opcode
<< 1, 0, 27));
16986 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
16989 case J32
: /* BGTZC, BLTZC, BLTC */
16990 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16991 if (rs
== 0 && rt
!= 0) {
16993 mips32_op
= OPC_BGTZC
;
16994 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
16996 mips32_op
= OPC_BLTZC
;
16999 mips32_op
= OPC_BLTC
;
17001 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17004 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
17005 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17008 case JAL32
: /* BLEZC, BGEZC, BGEC */
17009 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17010 if (rs
== 0 && rt
!= 0) {
17012 mips32_op
= OPC_BLEZC
;
17013 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17015 mips32_op
= OPC_BGEZC
;
17018 mips32_op
= OPC_BGEC
;
17020 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17023 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
17024 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17025 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17028 /* Floating point (COP1) */
17030 mips32_op
= OPC_LWC1
;
17033 mips32_op
= OPC_LDC1
;
17036 mips32_op
= OPC_SWC1
;
17039 mips32_op
= OPC_SDC1
;
17041 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
17043 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17044 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17045 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17046 switch ((ctx
->opcode
>> 16) & 0x1f) {
17055 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17058 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17061 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17071 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17074 generate_exception(ctx
, EXCP_RI
);
17079 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17080 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17082 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17085 case BNVC
: /* BNEC, BNEZALC */
17086 check_insn(ctx
, ISA_MIPS32R6
);
17089 mips32_op
= OPC_BNVC
;
17090 } else if (rs
< rt
&& rs
== 0) {
17092 mips32_op
= OPC_BNEZALC
;
17095 mips32_op
= OPC_BNEC
;
17097 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17099 case R6_BNEZC
: /* JIALC */
17100 check_insn(ctx
, ISA_MIPS32R6
);
17103 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17104 sextract32(ctx
->opcode
<< 1, 0, 22));
17107 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17110 case R6_BEQZC
: /* JIC */
17111 check_insn(ctx
, ISA_MIPS32R6
);
17114 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17115 sextract32(ctx
->opcode
<< 1, 0, 22));
17118 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17121 case BLEZALC
: /* BGEZALC, BGEUC */
17122 check_insn(ctx
, ISA_MIPS32R6
);
17123 if (rs
== 0 && rt
!= 0) {
17125 mips32_op
= OPC_BLEZALC
;
17126 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17128 mips32_op
= OPC_BGEZALC
;
17131 mips32_op
= OPC_BGEUC
;
17133 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17135 case BGTZALC
: /* BLTZALC, BLTUC */
17136 check_insn(ctx
, ISA_MIPS32R6
);
17137 if (rs
== 0 && rt
!= 0) {
17139 mips32_op
= OPC_BGTZALC
;
17140 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17142 mips32_op
= OPC_BLTZALC
;
17145 mips32_op
= OPC_BLTUC
;
17147 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17149 /* Loads and stores */
17151 mips32_op
= OPC_LB
;
17154 mips32_op
= OPC_LBU
;
17157 mips32_op
= OPC_LH
;
17160 mips32_op
= OPC_LHU
;
17163 mips32_op
= OPC_LW
;
17165 #ifdef TARGET_MIPS64
17167 check_insn(ctx
, ISA_MIPS3
);
17168 check_mips_64(ctx
);
17169 mips32_op
= OPC_LD
;
17172 check_insn(ctx
, ISA_MIPS3
);
17173 check_mips_64(ctx
);
17174 mips32_op
= OPC_SD
;
17178 mips32_op
= OPC_SB
;
17181 mips32_op
= OPC_SH
;
17184 mips32_op
= OPC_SW
;
17187 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17190 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17193 generate_exception_end(ctx
, EXCP_RI
);
17198 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
17202 /* make sure instructions are on a halfword boundary */
17203 if (ctx
->base
.pc_next
& 0x1) {
17204 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
17205 generate_exception_end(ctx
, EXCP_AdEL
);
17209 op
= (ctx
->opcode
>> 10) & 0x3f;
17210 /* Enforce properly-sized instructions in a delay slot */
17211 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
17212 switch (op
& 0x7) { /* MSB-3..MSB-5 */
17214 /* POOL32A, POOL32B, POOL32I, POOL32C */
17216 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
17218 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
17220 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
17222 /* LB32, LH32, LWC132, LDC132, LW32 */
17223 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
17224 generate_exception_end(ctx
, EXCP_RI
);
17229 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
17231 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
17233 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
17234 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
17235 generate_exception_end(ctx
, EXCP_RI
);
17245 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17246 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17247 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17250 switch (ctx
->opcode
& 0x1) {
17258 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17259 /* In the Release 6 the register number location in
17260 * the instruction encoding has changed.
17262 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17264 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17270 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17271 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17272 int amount
= (ctx
->opcode
>> 1) & 0x7;
17274 amount
= amount
== 0 ? 8 : amount
;
17276 switch (ctx
->opcode
& 0x1) {
17285 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17289 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17290 gen_pool16c_r6_insn(ctx
);
17292 gen_pool16c_insn(ctx
);
17297 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17298 int rb
= 28; /* GP */
17299 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17301 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17305 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17306 if (ctx
->opcode
& 1) {
17307 generate_exception_end(ctx
, EXCP_RI
);
17310 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17311 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17312 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17313 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17318 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17319 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17320 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17321 offset
= (offset
== 0xf ? -1 : offset
);
17323 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17328 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17329 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17330 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17332 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17337 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17338 int rb
= 29; /* SP */
17339 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17341 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17346 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17347 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17348 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17350 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17355 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17356 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17357 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17359 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17364 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17365 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17366 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17368 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17373 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17374 int rb
= 29; /* SP */
17375 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17377 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17382 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17383 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17384 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17386 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17391 int rd
= uMIPS_RD5(ctx
->opcode
);
17392 int rs
= uMIPS_RS5(ctx
->opcode
);
17394 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17401 switch (ctx
->opcode
& 0x1) {
17411 switch (ctx
->opcode
& 0x1) {
17416 gen_addiur1sp(ctx
);
17420 case B16
: /* BC16 */
17421 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17422 sextract32(ctx
->opcode
, 0, 10) << 1,
17423 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17425 case BNEZ16
: /* BNEZC16 */
17426 case BEQZ16
: /* BEQZC16 */
17427 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17428 mmreg(uMIPS_RD(ctx
->opcode
)),
17429 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17430 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17435 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17436 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17438 imm
= (imm
== 0x7f ? -1 : imm
);
17439 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17445 generate_exception_end(ctx
, EXCP_RI
);
17448 decode_micromips32_opc(env
, ctx
);
17461 /* MAJOR, P16, and P32 pools opcodes */
17465 NM_MOVE_BALC
= 0x02,
17473 NM_P16_SHIFT
= 0x0c,
17491 NM_P_LS_U12
= 0x21,
17501 NM_P16_ADDU
= 0x2c,
17515 NM_MOVEPREV
= 0x3f,
17518 /* POOL32A instruction pool */
17520 NM_POOL32A0
= 0x00,
17521 NM_SPECIAL2
= 0x01,
17524 NM_POOL32A5
= 0x05,
17525 NM_POOL32A7
= 0x07,
17528 /* P.GP.W instruction pool */
17530 NM_ADDIUGP_W
= 0x00,
17535 /* P48I instruction pool */
17539 NM_ADDIUGP48
= 0x02,
17540 NM_ADDIUPC48
= 0x03,
17545 /* P.U12 instruction pool */
17554 NM_ADDIUNEG
= 0x08,
17561 /* POOL32F instruction pool */
17563 NM_POOL32F_0
= 0x00,
17564 NM_POOL32F_3
= 0x03,
17565 NM_POOL32F_5
= 0x05,
17568 /* POOL32S instruction pool */
17570 NM_POOL32S_0
= 0x00,
17571 NM_POOL32S_4
= 0x04,
17574 /* P.LUI instruction pool */
17580 /* P.GP.BH instruction pool */
17585 NM_ADDIUGP_B
= 0x03,
17588 NM_P_GP_CP1
= 0x06,
17591 /* P.LS.U12 instruction pool */
17596 NM_P_PREFU12
= 0x03,
17609 /* P.LS.S9 instruction pool */
17615 NM_P_LS_UAWM
= 0x05,
17618 /* P.BAL instruction pool */
17624 /* P.J instruction pool */
17627 NM_JALRC_HB
= 0x01,
17628 NM_P_BALRSC
= 0x08,
17631 /* P.BR1 instruction pool */
17639 /* P.BR2 instruction pool */
17646 /* P.BRI instruction pool */
17658 /* P16.SHIFT instruction pool */
17664 /* POOL16C instruction pool */
17666 NM_POOL16C_0
= 0x00,
17670 /* P16.A1 instruction pool */
17672 NM_ADDIUR1SP
= 0x01,
17675 /* P16.A2 instruction pool */
17678 NM_P_ADDIURS5
= 0x01,
17681 /* P16.ADDU instruction pool */
17687 /* P16.SR instruction pool */
17690 NM_RESTORE_JRC16
= 0x01,
17693 /* P16.4X4 instruction pool */
17699 /* P16.LB instruction pool */
17706 /* P16.LH instruction pool */
17713 /* P.RI instruction pool */
17716 NM_P_SYSCALL
= 0x01,
17721 /* POOL32A0 instruction pool */
17756 NM_D_E_MT_VPE
= 0x56,
17764 /* CRC32 instruction pool */
17774 /* POOL32A5 instruction pool */
17776 NM_CMP_EQ_PH
= 0x00,
17777 NM_CMP_LT_PH
= 0x08,
17778 NM_CMP_LE_PH
= 0x10,
17779 NM_CMPGU_EQ_QB
= 0x18,
17780 NM_CMPGU_LT_QB
= 0x20,
17781 NM_CMPGU_LE_QB
= 0x28,
17782 NM_CMPGDU_EQ_QB
= 0x30,
17783 NM_CMPGDU_LT_QB
= 0x38,
17784 NM_CMPGDU_LE_QB
= 0x40,
17785 NM_CMPU_EQ_QB
= 0x48,
17786 NM_CMPU_LT_QB
= 0x50,
17787 NM_CMPU_LE_QB
= 0x58,
17788 NM_ADDQ_S_W
= 0x60,
17789 NM_SUBQ_S_W
= 0x68,
17793 NM_ADDQ_S_PH
= 0x01,
17794 NM_ADDQH_R_PH
= 0x09,
17795 NM_ADDQH_R_W
= 0x11,
17796 NM_ADDU_S_QB
= 0x19,
17797 NM_ADDU_S_PH
= 0x21,
17798 NM_ADDUH_R_QB
= 0x29,
17799 NM_SHRAV_R_PH
= 0x31,
17800 NM_SHRAV_R_QB
= 0x39,
17801 NM_SUBQ_S_PH
= 0x41,
17802 NM_SUBQH_R_PH
= 0x49,
17803 NM_SUBQH_R_W
= 0x51,
17804 NM_SUBU_S_QB
= 0x59,
17805 NM_SUBU_S_PH
= 0x61,
17806 NM_SUBUH_R_QB
= 0x69,
17807 NM_SHLLV_S_PH
= 0x71,
17808 NM_PRECR_SRA_R_PH_W
= 0x79,
17810 NM_MULEU_S_PH_QBL
= 0x12,
17811 NM_MULEU_S_PH_QBR
= 0x1a,
17812 NM_MULQ_RS_PH
= 0x22,
17813 NM_MULQ_S_PH
= 0x2a,
17814 NM_MULQ_RS_W
= 0x32,
17815 NM_MULQ_S_W
= 0x3a,
17818 NM_SHRAV_R_W
= 0x5a,
17819 NM_SHRLV_PH
= 0x62,
17820 NM_SHRLV_QB
= 0x6a,
17821 NM_SHLLV_QB
= 0x72,
17822 NM_SHLLV_S_W
= 0x7a,
17826 NM_MULEQ_S_W_PHL
= 0x04,
17827 NM_MULEQ_S_W_PHR
= 0x0c,
17829 NM_MUL_S_PH
= 0x05,
17830 NM_PRECR_QB_PH
= 0x0d,
17831 NM_PRECRQ_QB_PH
= 0x15,
17832 NM_PRECRQ_PH_W
= 0x1d,
17833 NM_PRECRQ_RS_PH_W
= 0x25,
17834 NM_PRECRQU_S_QB_PH
= 0x2d,
17835 NM_PACKRL_PH
= 0x35,
17839 NM_SHRA_R_W
= 0x5e,
17840 NM_SHRA_R_PH
= 0x66,
17841 NM_SHLL_S_PH
= 0x76,
17842 NM_SHLL_S_W
= 0x7e,
17847 /* POOL32A7 instruction pool */
17852 NM_POOL32AXF
= 0x07,
17855 /* P.SR instruction pool */
17861 /* P.SHIFT instruction pool */
17869 /* P.ROTX instruction pool */
17874 /* P.INS instruction pool */
17879 /* P.EXT instruction pool */
17884 /* POOL32F_0 (fmt) instruction pool */
17889 NM_SELEQZ_S
= 0x07,
17890 NM_SELEQZ_D
= 0x47,
17894 NM_SELNEZ_S
= 0x0f,
17895 NM_SELNEZ_D
= 0x4f,
17910 /* POOL32F_3 instruction pool */
17914 NM_MINA_FMT
= 0x04,
17915 NM_MAXA_FMT
= 0x05,
17916 NM_POOL32FXF
= 0x07,
17919 /* POOL32F_5 instruction pool */
17921 NM_CMP_CONDN_S
= 0x00,
17922 NM_CMP_CONDN_D
= 0x02,
17925 /* P.GP.LH instruction pool */
17931 /* P.GP.SH instruction pool */
17936 /* P.GP.CP1 instruction pool */
17944 /* P.LS.S0 instruction pool */
17961 NM_P_PREFS9
= 0x03,
17967 /* P.LS.S1 instruction pool */
17969 NM_ASET_ACLR
= 0x02,
17977 /* P.LS.E0 instruction pool */
17993 /* P.PREFE instruction pool */
17999 /* P.LLE instruction pool */
18005 /* P.SCE instruction pool */
18011 /* P.LS.WM instruction pool */
18017 /* P.LS.UAWM instruction pool */
18023 /* P.BR3A instruction pool */
18029 NM_BPOSGE32C
= 0x04,
18032 /* P16.RI instruction pool */
18034 NM_P16_SYSCALL
= 0x01,
18039 /* POOL16C_0 instruction pool */
18041 NM_POOL16C_00
= 0x00,
18044 /* P16.JRC instruction pool */
18050 /* P.SYSCALL instruction pool */
18056 /* P.TRAP instruction pool */
18062 /* P.CMOVE instruction pool */
18068 /* POOL32Axf instruction pool */
18070 NM_POOL32AXF_1
= 0x01,
18071 NM_POOL32AXF_2
= 0x02,
18072 NM_POOL32AXF_4
= 0x04,
18073 NM_POOL32AXF_5
= 0x05,
18074 NM_POOL32AXF_7
= 0x07,
18077 /* POOL32Axf_1 instruction pool */
18079 NM_POOL32AXF_1_0
= 0x00,
18080 NM_POOL32AXF_1_1
= 0x01,
18081 NM_POOL32AXF_1_3
= 0x03,
18082 NM_POOL32AXF_1_4
= 0x04,
18083 NM_POOL32AXF_1_5
= 0x05,
18084 NM_POOL32AXF_1_7
= 0x07,
18087 /* POOL32Axf_2 instruction pool */
18089 NM_POOL32AXF_2_0_7
= 0x00,
18090 NM_POOL32AXF_2_8_15
= 0x01,
18091 NM_POOL32AXF_2_16_23
= 0x02,
18092 NM_POOL32AXF_2_24_31
= 0x03,
18095 /* POOL32Axf_7 instruction pool */
18097 NM_SHRA_R_QB
= 0x0,
18102 /* POOL32Axf_1_0 instruction pool */
18110 /* POOL32Axf_1_1 instruction pool */
18116 /* POOL32Axf_1_3 instruction pool */
18124 /* POOL32Axf_1_4 instruction pool */
18130 /* POOL32Axf_1_5 instruction pool */
18132 NM_MAQ_S_W_PHR
= 0x0,
18133 NM_MAQ_S_W_PHL
= 0x1,
18134 NM_MAQ_SA_W_PHR
= 0x2,
18135 NM_MAQ_SA_W_PHL
= 0x3,
18138 /* POOL32Axf_1_7 instruction pool */
18142 NM_EXTR_RS_W
= 0x2,
18146 /* POOL32Axf_2_0_7 instruction pool */
18149 NM_DPAQ_S_W_PH
= 0x1,
18151 NM_DPSQ_S_W_PH
= 0x3,
18158 /* POOL32Axf_2_8_15 instruction pool */
18160 NM_DPAX_W_PH
= 0x0,
18161 NM_DPAQ_SA_L_W
= 0x1,
18162 NM_DPSX_W_PH
= 0x2,
18163 NM_DPSQ_SA_L_W
= 0x3,
18166 NM_EXTRV_R_W
= 0x7,
18169 /* POOL32Axf_2_16_23 instruction pool */
18171 NM_DPAU_H_QBL
= 0x0,
18172 NM_DPAQX_S_W_PH
= 0x1,
18173 NM_DPSU_H_QBL
= 0x2,
18174 NM_DPSQX_S_W_PH
= 0x3,
18177 NM_MULSA_W_PH
= 0x6,
18178 NM_EXTRV_RS_W
= 0x7,
18181 /* POOL32Axf_2_24_31 instruction pool */
18183 NM_DPAU_H_QBR
= 0x0,
18184 NM_DPAQX_SA_W_PH
= 0x1,
18185 NM_DPSU_H_QBR
= 0x2,
18186 NM_DPSQX_SA_W_PH
= 0x3,
18189 NM_MULSAQ_S_W_PH
= 0x6,
18190 NM_EXTRV_S_H
= 0x7,
18193 /* POOL32Axf_{4, 5} instruction pool */
18212 /* nanoMIPS DSP instructions */
18213 NM_ABSQ_S_QB
= 0x00,
18214 NM_ABSQ_S_PH
= 0x08,
18215 NM_ABSQ_S_W
= 0x10,
18216 NM_PRECEQ_W_PHL
= 0x28,
18217 NM_PRECEQ_W_PHR
= 0x30,
18218 NM_PRECEQU_PH_QBL
= 0x38,
18219 NM_PRECEQU_PH_QBR
= 0x48,
18220 NM_PRECEU_PH_QBL
= 0x58,
18221 NM_PRECEU_PH_QBR
= 0x68,
18222 NM_PRECEQU_PH_QBLA
= 0x39,
18223 NM_PRECEQU_PH_QBRA
= 0x49,
18224 NM_PRECEU_PH_QBLA
= 0x59,
18225 NM_PRECEU_PH_QBRA
= 0x69,
18226 NM_REPLV_PH
= 0x01,
18227 NM_REPLV_QB
= 0x09,
18230 NM_RADDU_W_QB
= 0x78,
18236 /* PP.SR instruction pool */
18240 NM_RESTORE_JRC
= 0x03,
18243 /* P.SR.F instruction pool */
18246 NM_RESTOREF
= 0x01,
18249 /* P16.SYSCALL instruction pool */
18251 NM_SYSCALL16
= 0x00,
18252 NM_HYPCALL16
= 0x01,
18255 /* POOL16C_00 instruction pool */
18263 /* PP.LSX and PP.LSXS instruction pool */
18301 /* ERETx instruction pool */
18307 /* POOL32FxF_{0, 1} insturction pool */
18316 NM_CVT_S_PL
= 0x84,
18317 NM_CVT_S_PU
= 0xa4,
18319 NM_CVT_L_S
= 0x004,
18320 NM_CVT_L_D
= 0x104,
18321 NM_CVT_W_S
= 0x024,
18322 NM_CVT_W_D
= 0x124,
18324 NM_RSQRT_S
= 0x008,
18325 NM_RSQRT_D
= 0x108,
18330 NM_RECIP_S
= 0x048,
18331 NM_RECIP_D
= 0x148,
18333 NM_FLOOR_L_S
= 0x00c,
18334 NM_FLOOR_L_D
= 0x10c,
18336 NM_FLOOR_W_S
= 0x02c,
18337 NM_FLOOR_W_D
= 0x12c,
18339 NM_CEIL_L_S
= 0x04c,
18340 NM_CEIL_L_D
= 0x14c,
18341 NM_CEIL_W_S
= 0x06c,
18342 NM_CEIL_W_D
= 0x16c,
18343 NM_TRUNC_L_S
= 0x08c,
18344 NM_TRUNC_L_D
= 0x18c,
18345 NM_TRUNC_W_S
= 0x0ac,
18346 NM_TRUNC_W_D
= 0x1ac,
18347 NM_ROUND_L_S
= 0x0cc,
18348 NM_ROUND_L_D
= 0x1cc,
18349 NM_ROUND_W_S
= 0x0ec,
18350 NM_ROUND_W_D
= 0x1ec,
18358 NM_CVT_D_S
= 0x04d,
18359 NM_CVT_D_W
= 0x0cd,
18360 NM_CVT_D_L
= 0x14d,
18361 NM_CVT_S_D
= 0x06d,
18362 NM_CVT_S_W
= 0x0ed,
18363 NM_CVT_S_L
= 0x16d,
18366 /* P.LL instruction pool */
18372 /* P.SC instruction pool */
18378 /* P.DVP instruction pool */
18387 * nanoMIPS decoding engine
18392 /* extraction utilities */
18394 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18395 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18396 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18397 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18398 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18399 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
static inline int decode_gpr_gpr3(int r)
{
    /*
     * 3-bit encodings 0..3 select $16..$19, 4..7 select $4..$7;
     * computed directly instead of via a lookup table.
     */
    r &= 0x7;
    return (r < 4) ? (r + 16) : r;
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
static inline int decode_gpr_gpr3_src_store(int r)
{
    /*
     * Same mapping as 'gpr3' except that encoding 0 selects $0
     * (store-zero source) instead of $16.
     */
    r &= 0x7;
    if (r == 0) {
        return 0;
    }
    return (r < 4) ? (r + 16) : r;
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
static inline int decode_gpr_gpr4(int r)
{
    /*
     * 4-bit encodings: 0..3 -> $8..$11, 4..7 -> $4..$7,
     * 8..15 -> $16..$23.  Both the low and high quadrants map to
     * (encoding + 8); only 4..7 map to themselves.
     */
    r &= 0xf;
    return (r >= 4 && r < 8) ? r : (r + 8);
}
/* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
static inline int decode_gpr_gpr4_zero(int r)
{
    /*
     * Same mapping as 'gpr4' except that encoding 3 selects $0
     * instead of $11.
     */
    r &= 0xf;
    if (r == 3) {
        return 0;
    }
    return (r >= 4 && r < 8) ? r : (r + 8);
}
18436 /* extraction utilities */
18438 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
18439 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
18440 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
18441 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
18442 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18443 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18446 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18448 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18451 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18452 uint8_t gp
, uint16_t u
)
18455 TCGv va
= tcg_temp_new();
18456 TCGv t0
= tcg_temp_new();
18458 while (counter
!= count
) {
18459 bool use_gp
= gp
&& (counter
== count
- 1);
18460 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18461 int this_offset
= -((counter
+ 1) << 2);
18462 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18463 gen_load_gpr(t0
, this_rt
);
18464 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18465 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18469 /* adjust stack pointer */
18470 gen_adjust_sp(ctx
, -u
);
18476 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18477 uint8_t gp
, uint16_t u
)
18480 TCGv va
= tcg_temp_new();
18481 TCGv t0
= tcg_temp_new();
18483 while (counter
!= count
) {
18484 bool use_gp
= gp
&& (counter
== count
- 1);
18485 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18486 int this_offset
= u
- ((counter
+ 1) << 2);
18487 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18488 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18489 ctx
->default_tcg_memop_mask
);
18490 tcg_gen_ext32s_tl(t0
, t0
);
18491 gen_store_gpr(t0
, this_rt
);
18495 /* adjust stack pointer */
18496 gen_adjust_sp(ctx
, u
);
18502 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18504 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
18505 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
18507 switch (extract32(ctx
->opcode
, 2, 2)) {
18509 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
18512 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
18515 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
18518 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18523 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18525 int rt
= extract32(ctx
->opcode
, 21, 5);
18526 int rs
= extract32(ctx
->opcode
, 16, 5);
18527 int rd
= extract32(ctx
->opcode
, 11, 5);
18529 switch (extract32(ctx
->opcode
, 3, 7)) {
18531 switch (extract32(ctx
->opcode
, 10, 1)) {
18534 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18538 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18544 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18548 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18551 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18554 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18557 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18560 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18563 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18566 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18569 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18573 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18576 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18579 switch (extract32(ctx
->opcode
, 10, 1)) {
18581 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18584 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18589 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18592 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18595 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18598 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18601 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18606 #ifndef CONFIG_USER_ONLY
18607 TCGv t0
= tcg_temp_new();
18608 switch (extract32(ctx
->opcode
, 10, 1)) {
18611 check_cp0_enabled(ctx
);
18612 gen_helper_dvp(t0
, cpu_env
);
18613 gen_store_gpr(t0
, rt
);
18618 check_cp0_enabled(ctx
);
18619 gen_helper_evp(t0
, cpu_env
);
18620 gen_store_gpr(t0
, rt
);
18627 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18632 TCGv t0
= tcg_temp_new();
18633 TCGv t1
= tcg_temp_new();
18634 TCGv t2
= tcg_temp_new();
18636 gen_load_gpr(t1
, rs
);
18637 gen_load_gpr(t2
, rt
);
18638 tcg_gen_add_tl(t0
, t1
, t2
);
18639 tcg_gen_ext32s_tl(t0
, t0
);
18640 tcg_gen_xor_tl(t1
, t1
, t2
);
18641 tcg_gen_xor_tl(t2
, t0
, t2
);
18642 tcg_gen_andc_tl(t1
, t2
, t1
);
18644 /* operands of same sign, result different sign */
18645 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18646 gen_store_gpr(t0
, rd
);
18654 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18657 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18660 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18663 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18666 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18669 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18672 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18675 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18677 #ifndef CONFIG_USER_ONLY
18679 check_cp0_enabled(ctx
);
18681 /* Treat as NOP. */
18684 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18687 check_cp0_enabled(ctx
);
18689 TCGv t0
= tcg_temp_new();
18691 gen_load_gpr(t0
, rt
);
18692 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18696 case NM_D_E_MT_VPE
:
18698 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18699 TCGv t0
= tcg_temp_new();
18706 gen_helper_dmt(t0
);
18707 gen_store_gpr(t0
, rt
);
18708 } else if (rs
== 0) {
18711 gen_helper_dvpe(t0
, cpu_env
);
18712 gen_store_gpr(t0
, rt
);
18714 generate_exception_end(ctx
, EXCP_RI
);
18721 gen_helper_emt(t0
);
18722 gen_store_gpr(t0
, rt
);
18723 } else if (rs
== 0) {
18726 gen_helper_evpe(t0
, cpu_env
);
18727 gen_store_gpr(t0
, rt
);
18729 generate_exception_end(ctx
, EXCP_RI
);
18740 TCGv t0
= tcg_temp_new();
18741 TCGv t1
= tcg_temp_new();
18743 gen_load_gpr(t0
, rt
);
18744 gen_load_gpr(t1
, rs
);
18745 gen_helper_fork(t0
, t1
);
18752 check_cp0_enabled(ctx
);
18754 /* Treat as NOP. */
18757 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18758 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18762 check_cp0_enabled(ctx
);
18763 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18764 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18769 TCGv t0
= tcg_temp_new();
18771 gen_load_gpr(t0
, rs
);
18772 gen_helper_yield(t0
, cpu_env
, t0
);
18773 gen_store_gpr(t0
, rt
);
18779 generate_exception_end(ctx
, EXCP_RI
);
18785 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18786 int ret
, int v1
, int v2
)
18792 t0
= tcg_temp_new_i32();
18794 v0_t
= tcg_temp_new();
18795 v1_t
= tcg_temp_new();
18797 tcg_gen_movi_i32(t0
, v2
>> 3);
18799 gen_load_gpr(v0_t
, ret
);
18800 gen_load_gpr(v1_t
, v1
);
18803 case NM_MAQ_S_W_PHR
:
18805 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18807 case NM_MAQ_S_W_PHL
:
18809 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18811 case NM_MAQ_SA_W_PHR
:
18813 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18815 case NM_MAQ_SA_W_PHL
:
18817 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18820 generate_exception_end(ctx
, EXCP_RI
);
18824 tcg_temp_free_i32(t0
);
18826 tcg_temp_free(v0_t
);
18827 tcg_temp_free(v1_t
);
18831 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18832 int ret
, int v1
, int v2
)
18835 TCGv t0
= tcg_temp_new();
18836 TCGv t1
= tcg_temp_new();
18837 TCGv v0_t
= tcg_temp_new();
18839 gen_load_gpr(v0_t
, v1
);
18842 case NM_POOL32AXF_1_0
:
18844 switch (extract32(ctx
->opcode
, 12, 2)) {
18846 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18849 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18852 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18855 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18859 case NM_POOL32AXF_1_1
:
18861 switch (extract32(ctx
->opcode
, 12, 2)) {
18863 tcg_gen_movi_tl(t0
, v2
);
18864 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18867 tcg_gen_movi_tl(t0
, v2
>> 3);
18868 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18871 generate_exception_end(ctx
, EXCP_RI
);
18875 case NM_POOL32AXF_1_3
:
18877 imm
= extract32(ctx
->opcode
, 14, 7);
18878 switch (extract32(ctx
->opcode
, 12, 2)) {
18880 tcg_gen_movi_tl(t0
, imm
);
18881 gen_helper_rddsp(t0
, t0
, cpu_env
);
18882 gen_store_gpr(t0
, ret
);
18885 gen_load_gpr(t0
, ret
);
18886 tcg_gen_movi_tl(t1
, imm
);
18887 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18890 tcg_gen_movi_tl(t0
, v2
>> 3);
18891 tcg_gen_movi_tl(t1
, v1
);
18892 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18893 gen_store_gpr(t0
, ret
);
18896 tcg_gen_movi_tl(t0
, v2
>> 3);
18897 tcg_gen_movi_tl(t1
, v1
);
18898 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18899 gen_store_gpr(t0
, ret
);
18903 case NM_POOL32AXF_1_4
:
18905 tcg_gen_movi_tl(t0
, v2
>> 2);
18906 switch (extract32(ctx
->opcode
, 12, 1)) {
18908 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
18909 gen_store_gpr(t0
, ret
);
18912 gen_helper_shrl_qb(t0
, t0
, v0_t
);
18913 gen_store_gpr(t0
, ret
);
18917 case NM_POOL32AXF_1_5
:
18918 opc
= extract32(ctx
->opcode
, 12, 2);
18919 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
18921 case NM_POOL32AXF_1_7
:
18923 tcg_gen_movi_tl(t0
, v2
>> 3);
18924 tcg_gen_movi_tl(t1
, v1
);
18925 switch (extract32(ctx
->opcode
, 12, 2)) {
18927 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
18928 gen_store_gpr(t0
, ret
);
18931 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
18932 gen_store_gpr(t0
, ret
);
18935 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
18936 gen_store_gpr(t0
, ret
);
18939 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
18940 gen_store_gpr(t0
, ret
);
18945 generate_exception_end(ctx
, EXCP_RI
);
18951 tcg_temp_free(v0_t
);
18954 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
18955 TCGv v0
, TCGv v1
, int rd
)
18959 t0
= tcg_temp_new_i32();
18961 tcg_gen_movi_i32(t0
, rd
>> 3);
18964 case NM_POOL32AXF_2_0_7
:
18965 switch (extract32(ctx
->opcode
, 9, 3)) {
18968 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
18970 case NM_DPAQ_S_W_PH
:
18972 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18976 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
18978 case NM_DPSQ_S_W_PH
:
18980 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
18983 generate_exception_end(ctx
, EXCP_RI
);
18987 case NM_POOL32AXF_2_8_15
:
18988 switch (extract32(ctx
->opcode
, 9, 3)) {
18991 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
18993 case NM_DPAQ_SA_L_W
:
18995 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
18999 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
19001 case NM_DPSQ_SA_L_W
:
19003 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19006 generate_exception_end(ctx
, EXCP_RI
);
19010 case NM_POOL32AXF_2_16_23
:
19011 switch (extract32(ctx
->opcode
, 9, 3)) {
19012 case NM_DPAU_H_QBL
:
19014 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
19016 case NM_DPAQX_S_W_PH
:
19018 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19020 case NM_DPSU_H_QBL
:
19022 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
19024 case NM_DPSQX_S_W_PH
:
19026 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19028 case NM_MULSA_W_PH
:
19030 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
19033 generate_exception_end(ctx
, EXCP_RI
);
19037 case NM_POOL32AXF_2_24_31
:
19038 switch (extract32(ctx
->opcode
, 9, 3)) {
19039 case NM_DPAU_H_QBR
:
19041 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
19043 case NM_DPAQX_SA_W_PH
:
19045 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19047 case NM_DPSU_H_QBR
:
19049 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
19051 case NM_DPSQX_SA_W_PH
:
19053 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19055 case NM_MULSAQ_S_W_PH
:
19057 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19060 generate_exception_end(ctx
, EXCP_RI
);
19065 generate_exception_end(ctx
, EXCP_RI
);
19069 tcg_temp_free_i32(t0
);
19072 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19073 int rt
, int rs
, int rd
)
19076 TCGv t0
= tcg_temp_new();
19077 TCGv t1
= tcg_temp_new();
19078 TCGv v0_t
= tcg_temp_new();
19079 TCGv v1_t
= tcg_temp_new();
19081 gen_load_gpr(v0_t
, rt
);
19082 gen_load_gpr(v1_t
, rs
);
19085 case NM_POOL32AXF_2_0_7
:
19086 switch (extract32(ctx
->opcode
, 9, 3)) {
19088 case NM_DPAQ_S_W_PH
:
19090 case NM_DPSQ_S_W_PH
:
19091 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19096 gen_load_gpr(t0
, rs
);
19098 if (rd
!= 0 && rd
!= 2) {
19099 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19100 tcg_gen_ext32u_tl(t0
, t0
);
19101 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19102 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19104 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19110 int acc
= extract32(ctx
->opcode
, 14, 2);
19111 TCGv_i64 t2
= tcg_temp_new_i64();
19112 TCGv_i64 t3
= tcg_temp_new_i64();
19114 gen_load_gpr(t0
, rt
);
19115 gen_load_gpr(t1
, rs
);
19116 tcg_gen_ext_tl_i64(t2
, t0
);
19117 tcg_gen_ext_tl_i64(t3
, t1
);
19118 tcg_gen_mul_i64(t2
, t2
, t3
);
19119 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19120 tcg_gen_add_i64(t2
, t2
, t3
);
19121 tcg_temp_free_i64(t3
);
19122 gen_move_low32(cpu_LO
[acc
], t2
);
19123 gen_move_high32(cpu_HI
[acc
], t2
);
19124 tcg_temp_free_i64(t2
);
19130 int acc
= extract32(ctx
->opcode
, 14, 2);
19131 TCGv_i32 t2
= tcg_temp_new_i32();
19132 TCGv_i32 t3
= tcg_temp_new_i32();
19134 gen_load_gpr(t0
, rs
);
19135 gen_load_gpr(t1
, rt
);
19136 tcg_gen_trunc_tl_i32(t2
, t0
);
19137 tcg_gen_trunc_tl_i32(t3
, t1
);
19138 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19139 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19140 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19141 tcg_temp_free_i32(t2
);
19142 tcg_temp_free_i32(t3
);
19147 gen_load_gpr(v1_t
, rs
);
19148 tcg_gen_movi_tl(t0
, rd
>> 3);
19149 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19150 gen_store_gpr(t0
, ret
);
19154 case NM_POOL32AXF_2_8_15
:
19155 switch (extract32(ctx
->opcode
, 9, 3)) {
19157 case NM_DPAQ_SA_L_W
:
19159 case NM_DPSQ_SA_L_W
:
19160 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19165 int acc
= extract32(ctx
->opcode
, 14, 2);
19166 TCGv_i64 t2
= tcg_temp_new_i64();
19167 TCGv_i64 t3
= tcg_temp_new_i64();
19169 gen_load_gpr(t0
, rs
);
19170 gen_load_gpr(t1
, rt
);
19171 tcg_gen_ext32u_tl(t0
, t0
);
19172 tcg_gen_ext32u_tl(t1
, t1
);
19173 tcg_gen_extu_tl_i64(t2
, t0
);
19174 tcg_gen_extu_tl_i64(t3
, t1
);
19175 tcg_gen_mul_i64(t2
, t2
, t3
);
19176 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19177 tcg_gen_add_i64(t2
, t2
, t3
);
19178 tcg_temp_free_i64(t3
);
19179 gen_move_low32(cpu_LO
[acc
], t2
);
19180 gen_move_high32(cpu_HI
[acc
], t2
);
19181 tcg_temp_free_i64(t2
);
19187 int acc
= extract32(ctx
->opcode
, 14, 2);
19188 TCGv_i32 t2
= tcg_temp_new_i32();
19189 TCGv_i32 t3
= tcg_temp_new_i32();
19191 gen_load_gpr(t0
, rs
);
19192 gen_load_gpr(t1
, rt
);
19193 tcg_gen_trunc_tl_i32(t2
, t0
);
19194 tcg_gen_trunc_tl_i32(t3
, t1
);
19195 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19196 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19197 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19198 tcg_temp_free_i32(t2
);
19199 tcg_temp_free_i32(t3
);
19204 tcg_gen_movi_tl(t0
, rd
>> 3);
19205 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19206 gen_store_gpr(t0
, ret
);
19209 generate_exception_end(ctx
, EXCP_RI
);
19213 case NM_POOL32AXF_2_16_23
:
19214 switch (extract32(ctx
->opcode
, 9, 3)) {
19215 case NM_DPAU_H_QBL
:
19216 case NM_DPAQX_S_W_PH
:
19217 case NM_DPSU_H_QBL
:
19218 case NM_DPSQX_S_W_PH
:
19219 case NM_MULSA_W_PH
:
19220 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19224 tcg_gen_movi_tl(t0
, rd
>> 3);
19225 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19226 gen_store_gpr(t0
, ret
);
19231 int acc
= extract32(ctx
->opcode
, 14, 2);
19232 TCGv_i64 t2
= tcg_temp_new_i64();
19233 TCGv_i64 t3
= tcg_temp_new_i64();
19235 gen_load_gpr(t0
, rs
);
19236 gen_load_gpr(t1
, rt
);
19237 tcg_gen_ext_tl_i64(t2
, t0
);
19238 tcg_gen_ext_tl_i64(t3
, t1
);
19239 tcg_gen_mul_i64(t2
, t2
, t3
);
19240 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19241 tcg_gen_sub_i64(t2
, t3
, t2
);
19242 tcg_temp_free_i64(t3
);
19243 gen_move_low32(cpu_LO
[acc
], t2
);
19244 gen_move_high32(cpu_HI
[acc
], t2
);
19245 tcg_temp_free_i64(t2
);
19248 case NM_EXTRV_RS_W
:
19250 tcg_gen_movi_tl(t0
, rd
>> 3);
19251 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19252 gen_store_gpr(t0
, ret
);
19256 case NM_POOL32AXF_2_24_31
:
19257 switch (extract32(ctx
->opcode
, 9, 3)) {
19258 case NM_DPAU_H_QBR
:
19259 case NM_DPAQX_SA_W_PH
:
19260 case NM_DPSU_H_QBR
:
19261 case NM_DPSQX_SA_W_PH
:
19262 case NM_MULSAQ_S_W_PH
:
19263 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19267 tcg_gen_movi_tl(t0
, rd
>> 3);
19268 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19269 gen_store_gpr(t0
, ret
);
19274 int acc
= extract32(ctx
->opcode
, 14, 2);
19275 TCGv_i64 t2
= tcg_temp_new_i64();
19276 TCGv_i64 t3
= tcg_temp_new_i64();
19278 gen_load_gpr(t0
, rs
);
19279 gen_load_gpr(t1
, rt
);
19280 tcg_gen_ext32u_tl(t0
, t0
);
19281 tcg_gen_ext32u_tl(t1
, t1
);
19282 tcg_gen_extu_tl_i64(t2
, t0
);
19283 tcg_gen_extu_tl_i64(t3
, t1
);
19284 tcg_gen_mul_i64(t2
, t2
, t3
);
19285 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19286 tcg_gen_sub_i64(t2
, t3
, t2
);
19287 tcg_temp_free_i64(t3
);
19288 gen_move_low32(cpu_LO
[acc
], t2
);
19289 gen_move_high32(cpu_HI
[acc
], t2
);
19290 tcg_temp_free_i64(t2
);
19295 tcg_gen_movi_tl(t0
, rd
>> 3);
19296 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19297 gen_store_gpr(t0
, ret
);
19302 generate_exception_end(ctx
, EXCP_RI
);
19309 tcg_temp_free(v0_t
);
19310 tcg_temp_free(v1_t
);
19313 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19317 TCGv t0
= tcg_temp_new();
19318 TCGv v0_t
= tcg_temp_new();
19320 gen_load_gpr(v0_t
, rs
);
19325 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19326 gen_store_gpr(v0_t
, ret
);
19330 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19331 gen_store_gpr(v0_t
, ret
);
19335 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19336 gen_store_gpr(v0_t
, ret
);
19338 case NM_PRECEQ_W_PHL
:
19340 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19341 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19342 gen_store_gpr(v0_t
, ret
);
19344 case NM_PRECEQ_W_PHR
:
19346 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19347 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19348 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19349 gen_store_gpr(v0_t
, ret
);
19351 case NM_PRECEQU_PH_QBL
:
19353 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19354 gen_store_gpr(v0_t
, ret
);
19356 case NM_PRECEQU_PH_QBR
:
19358 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19359 gen_store_gpr(v0_t
, ret
);
19361 case NM_PRECEQU_PH_QBLA
:
19363 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19364 gen_store_gpr(v0_t
, ret
);
19366 case NM_PRECEQU_PH_QBRA
:
19368 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19369 gen_store_gpr(v0_t
, ret
);
19371 case NM_PRECEU_PH_QBL
:
19373 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19374 gen_store_gpr(v0_t
, ret
);
19376 case NM_PRECEU_PH_QBR
:
19378 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19379 gen_store_gpr(v0_t
, ret
);
19381 case NM_PRECEU_PH_QBLA
:
19383 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19384 gen_store_gpr(v0_t
, ret
);
19386 case NM_PRECEU_PH_QBRA
:
19388 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19389 gen_store_gpr(v0_t
, ret
);
19393 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19394 tcg_gen_shli_tl(t0
, v0_t
, 16);
19395 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19396 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19397 gen_store_gpr(v0_t
, ret
);
19401 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19402 tcg_gen_shli_tl(t0
, v0_t
, 8);
19403 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19404 tcg_gen_shli_tl(t0
, v0_t
, 16);
19405 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19406 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19407 gen_store_gpr(v0_t
, ret
);
19411 gen_helper_bitrev(v0_t
, v0_t
);
19412 gen_store_gpr(v0_t
, ret
);
19417 TCGv tv0
= tcg_temp_new();
19419 gen_load_gpr(tv0
, rt
);
19420 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19421 gen_store_gpr(v0_t
, ret
);
19422 tcg_temp_free(tv0
);
19425 case NM_RADDU_W_QB
:
19427 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19428 gen_store_gpr(v0_t
, ret
);
19431 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19435 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19439 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19442 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19445 generate_exception_end(ctx
, EXCP_RI
);
19449 tcg_temp_free(v0_t
);
19453 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19454 int rt
, int rs
, int rd
)
19456 TCGv t0
= tcg_temp_new();
19457 TCGv rs_t
= tcg_temp_new();
19459 gen_load_gpr(rs_t
, rs
);
19464 tcg_gen_movi_tl(t0
, rd
>> 2);
19465 switch (extract32(ctx
->opcode
, 12, 1)) {
19468 gen_helper_shra_qb(t0
, t0
, rs_t
);
19469 gen_store_gpr(t0
, rt
);
19473 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19474 gen_store_gpr(t0
, rt
);
19480 tcg_gen_movi_tl(t0
, rd
>> 1);
19481 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19482 gen_store_gpr(t0
, rt
);
19488 target_long result
;
19489 imm
= extract32(ctx
->opcode
, 13, 8);
19490 result
= (uint32_t)imm
<< 24 |
19491 (uint32_t)imm
<< 16 |
19492 (uint32_t)imm
<< 8 |
19494 result
= (int32_t)result
;
19495 tcg_gen_movi_tl(t0
, result
);
19496 gen_store_gpr(t0
, rt
);
19500 generate_exception_end(ctx
, EXCP_RI
);
19504 tcg_temp_free(rs_t
);
19508 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19510 int rt
= extract32(ctx
->opcode
, 21, 5);
19511 int rs
= extract32(ctx
->opcode
, 16, 5);
19512 int rd
= extract32(ctx
->opcode
, 11, 5);
19514 switch (extract32(ctx
->opcode
, 6, 3)) {
19515 case NM_POOL32AXF_1
:
19517 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19518 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19521 case NM_POOL32AXF_2
:
19523 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19524 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19527 case NM_POOL32AXF_4
:
19529 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19530 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19533 case NM_POOL32AXF_5
:
19534 switch (extract32(ctx
->opcode
, 9, 7)) {
19535 #ifndef CONFIG_USER_ONLY
19537 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19540 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19543 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19546 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19549 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19552 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19555 check_cp0_enabled(ctx
);
19557 TCGv t0
= tcg_temp_new();
19559 save_cpu_state(ctx
, 1);
19560 gen_helper_di(t0
, cpu_env
);
19561 gen_store_gpr(t0
, rt
);
19562 /* Stop translation as we may have switched the execution mode */
19563 ctx
->base
.is_jmp
= DISAS_STOP
;
19568 check_cp0_enabled(ctx
);
19570 TCGv t0
= tcg_temp_new();
19572 save_cpu_state(ctx
, 1);
19573 gen_helper_ei(t0
, cpu_env
);
19574 gen_store_gpr(t0
, rt
);
19575 /* Stop translation as we may have switched the execution mode */
19576 ctx
->base
.is_jmp
= DISAS_STOP
;
19581 gen_load_srsgpr(rs
, rt
);
19584 gen_store_srsgpr(rs
, rt
);
19587 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19590 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19593 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19597 generate_exception_end(ctx
, EXCP_RI
);
19601 case NM_POOL32AXF_7
:
19603 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19604 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19608 generate_exception_end(ctx
, EXCP_RI
);
19613 /* Immediate Value Compact Branches */
19614 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19615 int rt
, int32_t imm
, int32_t offset
)
19618 int bcond_compute
= 0;
19619 TCGv t0
= tcg_temp_new();
19620 TCGv t1
= tcg_temp_new();
19622 gen_load_gpr(t0
, rt
);
19623 tcg_gen_movi_tl(t1
, imm
);
19624 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19626 /* Load needed operands and calculate btarget */
19629 if (rt
== 0 && imm
== 0) {
19630 /* Unconditional branch */
19631 } else if (rt
== 0 && imm
!= 0) {
19636 cond
= TCG_COND_EQ
;
19642 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19643 generate_exception_end(ctx
, EXCP_RI
);
19645 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19646 /* Unconditional branch */
19647 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19651 tcg_gen_shri_tl(t0
, t0
, imm
);
19652 tcg_gen_andi_tl(t0
, t0
, 1);
19653 tcg_gen_movi_tl(t1
, 0);
19655 if (opc
== NM_BBEQZC
) {
19656 cond
= TCG_COND_EQ
;
19658 cond
= TCG_COND_NE
;
19663 if (rt
== 0 && imm
== 0) {
19666 } else if (rt
== 0 && imm
!= 0) {
19667 /* Unconditional branch */
19670 cond
= TCG_COND_NE
;
19674 if (rt
== 0 && imm
== 0) {
19675 /* Unconditional branch */
19678 cond
= TCG_COND_GE
;
19683 cond
= TCG_COND_LT
;
19686 if (rt
== 0 && imm
== 0) {
19687 /* Unconditional branch */
19690 cond
= TCG_COND_GEU
;
19695 cond
= TCG_COND_LTU
;
19698 MIPS_INVAL("Immediate Value Compact branch");
19699 generate_exception_end(ctx
, EXCP_RI
);
19703 if (bcond_compute
== 0) {
19704 /* Uncoditional compact branch */
19705 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19707 /* Conditional compact branch */
19708 TCGLabel
*fs
= gen_new_label();
19710 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19712 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19715 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19723 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19724 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19727 TCGv t0
= tcg_temp_new();
19728 TCGv t1
= tcg_temp_new();
19731 gen_load_gpr(t0
, rs
);
19735 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19738 /* calculate btarget */
19739 tcg_gen_shli_tl(t0
, t0
, 1);
19740 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19741 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19743 /* unconditional branch to register */
19744 tcg_gen_mov_tl(cpu_PC
, btarget
);
19745 tcg_gen_lookup_and_goto_ptr();
19751 /* nanoMIPS Branches */
19752 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19753 int rs
, int rt
, int32_t offset
)
19755 int bcond_compute
= 0;
19756 TCGv t0
= tcg_temp_new();
19757 TCGv t1
= tcg_temp_new();
19759 /* Load needed operands and calculate btarget */
19761 /* compact branch */
19764 gen_load_gpr(t0
, rs
);
19765 gen_load_gpr(t1
, rt
);
19767 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19771 if (rs
== 0 || rs
== rt
) {
19772 /* OPC_BLEZALC, OPC_BGEZALC */
19773 /* OPC_BGTZALC, OPC_BLTZALC */
19774 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19776 gen_load_gpr(t0
, rs
);
19777 gen_load_gpr(t1
, rt
);
19779 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19782 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19786 /* OPC_BEQZC, OPC_BNEZC */
19787 gen_load_gpr(t0
, rs
);
19789 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19791 /* OPC_JIC, OPC_JIALC */
19792 TCGv tbase
= tcg_temp_new();
19793 TCGv toffset
= tcg_temp_new();
19795 gen_load_gpr(tbase
, rt
);
19796 tcg_gen_movi_tl(toffset
, offset
);
19797 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19798 tcg_temp_free(tbase
);
19799 tcg_temp_free(toffset
);
19803 MIPS_INVAL("Compact branch/jump");
19804 generate_exception_end(ctx
, EXCP_RI
);
19808 if (bcond_compute
== 0) {
19809 /* Uncoditional compact branch */
19812 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19815 MIPS_INVAL("Compact branch/jump");
19816 generate_exception_end(ctx
, EXCP_RI
);
19820 /* Conditional compact branch */
19821 TCGLabel
*fs
= gen_new_label();
19825 if (rs
== 0 && rt
!= 0) {
19827 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19828 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19830 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19833 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19837 if (rs
== 0 && rt
!= 0) {
19839 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19840 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19842 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19845 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19849 if (rs
== 0 && rt
!= 0) {
19851 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19852 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19854 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19857 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19861 if (rs
== 0 && rt
!= 0) {
19863 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19864 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19866 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19869 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19873 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19876 MIPS_INVAL("Compact conditional branch/jump");
19877 generate_exception_end(ctx
, EXCP_RI
);
19881 /* Generating branch here as compact branches don't have delay slot */
19882 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19885 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19894 /* nanoMIPS CP1 Branches */
19895 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19896 int32_t ft
, int32_t offset
)
19898 target_ulong btarget
;
19899 TCGv_i64 t0
= tcg_temp_new_i64();
19901 gen_load_fpr64(ctx
, t0
, ft
);
19902 tcg_gen_andi_i64(t0
, t0
, 1);
19904 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19908 tcg_gen_xori_i64(t0
, t0
, 1);
19909 ctx
->hflags
|= MIPS_HFLAG_BC
;
19912 /* t0 already set */
19913 ctx
->hflags
|= MIPS_HFLAG_BC
;
19916 MIPS_INVAL("cp1 cond branch");
19917 generate_exception_end(ctx
, EXCP_RI
);
19921 tcg_gen_trunc_i64_tl(bcond
, t0
);
19923 ctx
->btarget
= btarget
;
19926 tcg_temp_free_i64(t0
);
19930 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
19933 t0
= tcg_temp_new();
19934 t1
= tcg_temp_new();
19936 gen_load_gpr(t0
, rs
);
19937 gen_load_gpr(t1
, rt
);
19939 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
19940 /* PP.LSXS instructions require shifting */
19941 switch (extract32(ctx
->opcode
, 7, 4)) {
19946 tcg_gen_shli_tl(t0
, t0
, 1);
19953 tcg_gen_shli_tl(t0
, t0
, 2);
19957 tcg_gen_shli_tl(t0
, t0
, 3);
19961 gen_op_addr_add(ctx
, t0
, t0
, t1
);
19963 switch (extract32(ctx
->opcode
, 7, 4)) {
19965 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19967 gen_store_gpr(t0
, rd
);
19971 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19973 gen_store_gpr(t0
, rd
);
19977 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19979 gen_store_gpr(t0
, rd
);
19982 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19984 gen_store_gpr(t0
, rd
);
19988 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
19990 gen_store_gpr(t0
, rd
);
19994 gen_load_gpr(t1
, rd
);
19995 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20001 gen_load_gpr(t1
, rd
);
20002 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20008 gen_load_gpr(t1
, rd
);
20009 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20013 /*case NM_LWC1XS:*/
20015 /*case NM_LDC1XS:*/
20017 /*case NM_SWC1XS:*/
20019 /*case NM_SDC1XS:*/
20020 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20021 check_cp1_enabled(ctx
);
20022 switch (extract32(ctx
->opcode
, 7, 4)) {
20024 /*case NM_LWC1XS:*/
20025 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
20028 /*case NM_LDC1XS:*/
20029 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
20032 /*case NM_SWC1XS:*/
20033 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
20036 /*case NM_SDC1XS:*/
20037 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
20041 generate_exception_err(ctx
, EXCP_CpU
, 1);
20045 generate_exception_end(ctx
, EXCP_RI
);
20053 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20057 rt
= extract32(ctx
->opcode
, 21, 5);
20058 rs
= extract32(ctx
->opcode
, 16, 5);
20059 rd
= extract32(ctx
->opcode
, 11, 5);
20061 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20062 generate_exception_end(ctx
, EXCP_RI
);
20065 check_cp1_enabled(ctx
);
20066 switch (extract32(ctx
->opcode
, 0, 3)) {
20068 switch (extract32(ctx
->opcode
, 3, 7)) {
20070 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20073 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20076 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20079 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20082 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20085 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20088 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20091 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20094 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20097 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20100 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20103 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20106 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20109 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20112 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20115 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20118 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20121 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20124 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20127 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20130 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20133 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20136 generate_exception_end(ctx
, EXCP_RI
);
20141 switch (extract32(ctx
->opcode
, 3, 3)) {
20143 switch (extract32(ctx
->opcode
, 9, 1)) {
20145 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20148 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20153 switch (extract32(ctx
->opcode
, 9, 1)) {
20155 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20158 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20163 switch (extract32(ctx
->opcode
, 9, 1)) {
20165 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20168 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20173 switch (extract32(ctx
->opcode
, 9, 1)) {
20175 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20178 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
20183 switch (extract32(ctx
->opcode
, 6, 8)) {
20185 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20188 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20191 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20194 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20197 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20200 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20203 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20206 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
20209 switch (extract32(ctx
->opcode
, 6, 9)) {
20211 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20214 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20217 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20220 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20223 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20226 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20229 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20232 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20235 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20238 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20241 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20244 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20247 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20250 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20253 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20256 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20259 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20262 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20265 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20268 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20271 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20274 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20277 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20280 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20283 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20286 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20289 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20292 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20295 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20298 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20301 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20304 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20307 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20310 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20313 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20316 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20319 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20322 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20325 generate_exception_end(ctx
, EXCP_RI
);
20334 switch (extract32(ctx
->opcode
, 3, 3)) {
20335 case NM_CMP_CONDN_S
:
20336 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20338 case NM_CMP_CONDN_D
:
20339 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20342 generate_exception_end(ctx
, EXCP_RI
);
20347 generate_exception_end(ctx
, EXCP_RI
);
20352 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20353 int rd
, int rs
, int rt
)
20356 TCGv t0
= tcg_temp_new();
20357 TCGv v1_t
= tcg_temp_new();
20358 TCGv v2_t
= tcg_temp_new();
20360 gen_load_gpr(v1_t
, rs
);
20361 gen_load_gpr(v2_t
, rt
);
20366 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20370 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20374 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20376 case NM_CMPU_EQ_QB
:
20378 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20380 case NM_CMPU_LT_QB
:
20382 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20384 case NM_CMPU_LE_QB
:
20386 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20388 case NM_CMPGU_EQ_QB
:
20390 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20391 gen_store_gpr(v1_t
, ret
);
20393 case NM_CMPGU_LT_QB
:
20395 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20396 gen_store_gpr(v1_t
, ret
);
20398 case NM_CMPGU_LE_QB
:
20400 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20401 gen_store_gpr(v1_t
, ret
);
20403 case NM_CMPGDU_EQ_QB
:
20405 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20406 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20407 gen_store_gpr(v1_t
, ret
);
20409 case NM_CMPGDU_LT_QB
:
20411 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20412 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20413 gen_store_gpr(v1_t
, ret
);
20415 case NM_CMPGDU_LE_QB
:
20417 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20418 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20419 gen_store_gpr(v1_t
, ret
);
20423 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20424 gen_store_gpr(v1_t
, ret
);
20428 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20429 gen_store_gpr(v1_t
, ret
);
20433 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20434 gen_store_gpr(v1_t
, ret
);
20438 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20439 gen_store_gpr(v1_t
, ret
);
20443 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20444 gen_store_gpr(v1_t
, ret
);
20448 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20449 gen_store_gpr(v1_t
, ret
);
20453 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20454 gen_store_gpr(v1_t
, ret
);
20458 switch (extract32(ctx
->opcode
, 10, 1)) {
20461 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20462 gen_store_gpr(v1_t
, ret
);
20466 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20467 gen_store_gpr(v1_t
, ret
);
20471 case NM_ADDQH_R_PH
:
20473 switch (extract32(ctx
->opcode
, 10, 1)) {
20476 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20477 gen_store_gpr(v1_t
, ret
);
20481 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20482 gen_store_gpr(v1_t
, ret
);
20488 switch (extract32(ctx
->opcode
, 10, 1)) {
20491 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20492 gen_store_gpr(v1_t
, ret
);
20496 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20497 gen_store_gpr(v1_t
, ret
);
20503 switch (extract32(ctx
->opcode
, 10, 1)) {
20506 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20507 gen_store_gpr(v1_t
, ret
);
20511 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20512 gen_store_gpr(v1_t
, ret
);
20518 switch (extract32(ctx
->opcode
, 10, 1)) {
20521 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20522 gen_store_gpr(v1_t
, ret
);
20526 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20527 gen_store_gpr(v1_t
, ret
);
20531 case NM_ADDUH_R_QB
:
20533 switch (extract32(ctx
->opcode
, 10, 1)) {
20536 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20537 gen_store_gpr(v1_t
, ret
);
20541 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20542 gen_store_gpr(v1_t
, ret
);
20546 case NM_SHRAV_R_PH
:
20548 switch (extract32(ctx
->opcode
, 10, 1)) {
20551 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20552 gen_store_gpr(v1_t
, ret
);
20556 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20557 gen_store_gpr(v1_t
, ret
);
20561 case NM_SHRAV_R_QB
:
20563 switch (extract32(ctx
->opcode
, 10, 1)) {
20566 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20567 gen_store_gpr(v1_t
, ret
);
20571 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20572 gen_store_gpr(v1_t
, ret
);
20578 switch (extract32(ctx
->opcode
, 10, 1)) {
20581 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20582 gen_store_gpr(v1_t
, ret
);
20586 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20587 gen_store_gpr(v1_t
, ret
);
20591 case NM_SUBQH_R_PH
:
20593 switch (extract32(ctx
->opcode
, 10, 1)) {
20596 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20597 gen_store_gpr(v1_t
, ret
);
20601 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20602 gen_store_gpr(v1_t
, ret
);
20608 switch (extract32(ctx
->opcode
, 10, 1)) {
20611 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20612 gen_store_gpr(v1_t
, ret
);
20616 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20617 gen_store_gpr(v1_t
, ret
);
20623 switch (extract32(ctx
->opcode
, 10, 1)) {
20626 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20627 gen_store_gpr(v1_t
, ret
);
20631 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20632 gen_store_gpr(v1_t
, ret
);
20638 switch (extract32(ctx
->opcode
, 10, 1)) {
20641 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20642 gen_store_gpr(v1_t
, ret
);
20646 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20647 gen_store_gpr(v1_t
, ret
);
20651 case NM_SUBUH_R_QB
:
20653 switch (extract32(ctx
->opcode
, 10, 1)) {
20656 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20657 gen_store_gpr(v1_t
, ret
);
20661 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20662 gen_store_gpr(v1_t
, ret
);
20666 case NM_SHLLV_S_PH
:
20668 switch (extract32(ctx
->opcode
, 10, 1)) {
20671 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20672 gen_store_gpr(v1_t
, ret
);
20676 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20677 gen_store_gpr(v1_t
, ret
);
20681 case NM_PRECR_SRA_R_PH_W
:
20683 switch (extract32(ctx
->opcode
, 10, 1)) {
20685 /* PRECR_SRA_PH_W */
20687 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20688 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20690 gen_store_gpr(v1_t
, rt
);
20691 tcg_temp_free_i32(sa_t
);
20695 /* PRECR_SRA_R_PH_W */
20697 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20698 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20700 gen_store_gpr(v1_t
, rt
);
20701 tcg_temp_free_i32(sa_t
);
20706 case NM_MULEU_S_PH_QBL
:
20708 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20709 gen_store_gpr(v1_t
, ret
);
20711 case NM_MULEU_S_PH_QBR
:
20713 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20714 gen_store_gpr(v1_t
, ret
);
20716 case NM_MULQ_RS_PH
:
20718 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20719 gen_store_gpr(v1_t
, ret
);
20723 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20724 gen_store_gpr(v1_t
, ret
);
20728 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20729 gen_store_gpr(v1_t
, ret
);
20733 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20734 gen_store_gpr(v1_t
, ret
);
20738 gen_load_gpr(t0
, rs
);
20740 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20742 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20746 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20747 gen_store_gpr(v1_t
, ret
);
20751 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20752 gen_store_gpr(v1_t
, ret
);
20756 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20757 gen_store_gpr(v1_t
, ret
);
20761 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20762 gen_store_gpr(v1_t
, ret
);
20766 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20767 gen_store_gpr(v1_t
, ret
);
20771 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20772 gen_store_gpr(v1_t
, ret
);
20777 TCGv tv0
= tcg_temp_new();
20778 TCGv tv1
= tcg_temp_new();
20779 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20781 tcg_gen_movi_tl(tv0
, rd
>> 3);
20782 tcg_gen_movi_tl(tv1
, imm
);
20783 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20786 case NM_MULEQ_S_W_PHL
:
20788 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20789 gen_store_gpr(v1_t
, ret
);
20791 case NM_MULEQ_S_W_PHR
:
20793 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20794 gen_store_gpr(v1_t
, ret
);
20798 switch (extract32(ctx
->opcode
, 10, 1)) {
20801 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20802 gen_store_gpr(v1_t
, ret
);
20806 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20807 gen_store_gpr(v1_t
, ret
);
20811 case NM_PRECR_QB_PH
:
20813 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20814 gen_store_gpr(v1_t
, ret
);
20816 case NM_PRECRQ_QB_PH
:
20818 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20819 gen_store_gpr(v1_t
, ret
);
20821 case NM_PRECRQ_PH_W
:
20823 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20824 gen_store_gpr(v1_t
, ret
);
20826 case NM_PRECRQ_RS_PH_W
:
20828 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20829 gen_store_gpr(v1_t
, ret
);
20831 case NM_PRECRQU_S_QB_PH
:
20833 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20834 gen_store_gpr(v1_t
, ret
);
20838 tcg_gen_movi_tl(t0
, rd
);
20839 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20840 gen_store_gpr(v1_t
, rt
);
20844 tcg_gen_movi_tl(t0
, rd
>> 1);
20845 switch (extract32(ctx
->opcode
, 10, 1)) {
20848 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20849 gen_store_gpr(v1_t
, rt
);
20853 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20854 gen_store_gpr(v1_t
, rt
);
20860 tcg_gen_movi_tl(t0
, rd
>> 1);
20861 switch (extract32(ctx
->opcode
, 10, 2)) {
20864 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20865 gen_store_gpr(v1_t
, rt
);
20869 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20870 gen_store_gpr(v1_t
, rt
);
20873 generate_exception_end(ctx
, EXCP_RI
);
20879 tcg_gen_movi_tl(t0
, rd
);
20880 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20881 gen_store_gpr(v1_t
, rt
);
20887 imm
= sextract32(ctx
->opcode
, 11, 11);
20888 imm
= (int16_t)(imm
<< 6) >> 6;
20890 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20895 generate_exception_end(ctx
, EXCP_RI
);
20900 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
20908 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
20909 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
20911 rt
= extract32(ctx
->opcode
, 21, 5);
20912 rs
= extract32(ctx
->opcode
, 16, 5);
20913 rd
= extract32(ctx
->opcode
, 11, 5);
20915 op
= extract32(ctx
->opcode
, 26, 6);
20920 switch (extract32(ctx
->opcode
, 19, 2)) {
20923 generate_exception_end(ctx
, EXCP_RI
);
20926 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
20927 generate_exception_end(ctx
, EXCP_SYSCALL
);
20929 generate_exception_end(ctx
, EXCP_RI
);
20933 generate_exception_end(ctx
, EXCP_BREAK
);
20936 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
20937 gen_helper_do_semihosting(cpu_env
);
20939 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
20940 generate_exception_end(ctx
, EXCP_RI
);
20942 generate_exception_end(ctx
, EXCP_DBp
);
20949 imm
= extract32(ctx
->opcode
, 0, 16);
20951 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
20953 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
20955 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20960 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
20961 extract32(ctx
->opcode
, 1, 20) << 1;
20962 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20963 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
20967 switch (ctx
->opcode
& 0x07) {
20969 gen_pool32a0_nanomips_insn(env
, ctx
);
20973 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
20974 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
20978 switch (extract32(ctx
->opcode
, 3, 3)) {
20980 gen_p_lsx(ctx
, rd
, rs
, rt
);
20983 /* In nanoMIPS, the shift field directly encodes the shift
20984 * amount, meaning that the supported shift values are in
20985 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
20986 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
20987 extract32(ctx
->opcode
, 9, 2) - 1);
20990 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
20993 gen_pool32axf_nanomips_insn(env
, ctx
);
20996 generate_exception_end(ctx
, EXCP_RI
);
21001 generate_exception_end(ctx
, EXCP_RI
);
21006 switch (ctx
->opcode
& 0x03) {
21009 offset
= extract32(ctx
->opcode
, 0, 21);
21010 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
21014 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21017 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21020 generate_exception_end(ctx
, EXCP_RI
);
21026 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
21027 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
21028 switch (extract32(ctx
->opcode
, 16, 5)) {
21032 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
21038 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
21039 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21045 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
21051 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21054 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21061 t0
= tcg_temp_new();
21063 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21066 tcg_gen_movi_tl(t0
, addr
);
21067 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21075 t0
= tcg_temp_new();
21076 t1
= tcg_temp_new();
21078 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21081 tcg_gen_movi_tl(t0
, addr
);
21082 gen_load_gpr(t1
, rt
);
21084 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21091 generate_exception_end(ctx
, EXCP_RI
);
21097 switch (extract32(ctx
->opcode
, 12, 4)) {
21099 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21102 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21105 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21108 switch (extract32(ctx
->opcode
, 20, 1)) {
21110 switch (ctx
->opcode
& 3) {
21112 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21113 extract32(ctx
->opcode
, 2, 1),
21114 extract32(ctx
->opcode
, 3, 9) << 3);
21117 case NM_RESTORE_JRC
:
21118 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21119 extract32(ctx
->opcode
, 2, 1),
21120 extract32(ctx
->opcode
, 3, 9) << 3);
21121 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21122 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21126 generate_exception_end(ctx
, EXCP_RI
);
21131 generate_exception_end(ctx
, EXCP_RI
);
21136 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21139 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21143 TCGv t0
= tcg_temp_new();
21145 imm
= extract32(ctx
->opcode
, 0, 12);
21146 gen_load_gpr(t0
, rs
);
21147 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21148 gen_store_gpr(t0
, rt
);
21154 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21155 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21159 int shift
= extract32(ctx
->opcode
, 0, 5);
21160 switch (extract32(ctx
->opcode
, 5, 4)) {
21162 if (rt
== 0 && shift
== 0) {
21164 } else if (rt
== 0 && shift
== 3) {
21165 /* EHB - treat as NOP */
21166 } else if (rt
== 0 && shift
== 5) {
21167 /* PAUSE - treat as NOP */
21168 } else if (rt
== 0 && shift
== 6) {
21170 gen_sync(extract32(ctx
->opcode
, 16, 5));
21173 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21174 extract32(ctx
->opcode
, 0, 5));
21178 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21179 extract32(ctx
->opcode
, 0, 5));
21182 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21183 extract32(ctx
->opcode
, 0, 5));
21186 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21187 extract32(ctx
->opcode
, 0, 5));
21195 TCGv t0
= tcg_temp_new();
21196 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21197 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21199 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21201 gen_load_gpr(t0
, rs
);
21202 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21205 tcg_temp_free_i32(shift
);
21206 tcg_temp_free_i32(shiftx
);
21207 tcg_temp_free_i32(stripe
);
21211 switch (((ctx
->opcode
>> 10) & 2) |
21212 (extract32(ctx
->opcode
, 5, 1))) {
21215 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21216 extract32(ctx
->opcode
, 6, 5));
21219 generate_exception_end(ctx
, EXCP_RI
);
21224 switch (((ctx
->opcode
>> 10) & 2) |
21225 (extract32(ctx
->opcode
, 5, 1))) {
21228 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21229 extract32(ctx
->opcode
, 6, 5));
21232 generate_exception_end(ctx
, EXCP_RI
);
21237 generate_exception_end(ctx
, EXCP_RI
);
21242 gen_pool32f_nanomips_insn(ctx
);
21247 switch (extract32(ctx
->opcode
, 1, 1)) {
21250 tcg_gen_movi_tl(cpu_gpr
[rt
],
21251 sextract32(ctx
->opcode
, 0, 1) << 31 |
21252 extract32(ctx
->opcode
, 2, 10) << 21 |
21253 extract32(ctx
->opcode
, 12, 9) << 12);
21258 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21259 extract32(ctx
->opcode
, 2, 10) << 21 |
21260 extract32(ctx
->opcode
, 12, 9) << 12;
21262 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21263 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21270 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21272 switch (extract32(ctx
->opcode
, 18, 3)) {
21274 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21277 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21280 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21284 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21289 switch (ctx
->opcode
& 1) {
21291 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21294 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21300 switch (ctx
->opcode
& 1) {
21302 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21305 generate_exception_end(ctx
, EXCP_RI
);
21311 switch (ctx
->opcode
& 0x3) {
21313 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21316 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21319 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21322 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21327 generate_exception_end(ctx
, EXCP_RI
);
21334 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21336 switch (extract32(ctx
->opcode
, 12, 4)) {
21340 /* Break the TB to be able to sync copied instructions
21342 ctx
->base
.is_jmp
= DISAS_STOP
;
21345 /* Treat as NOP. */
21349 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21352 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21355 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21358 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21361 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21364 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21367 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21370 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21373 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21376 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21379 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21382 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21385 generate_exception_end(ctx
, EXCP_RI
);
21392 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21393 extract32(ctx
->opcode
, 0, 8);
21395 switch (extract32(ctx
->opcode
, 8, 3)) {
21397 switch (extract32(ctx
->opcode
, 11, 4)) {
21399 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21402 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21405 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21408 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21411 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21414 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21417 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21420 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21423 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21426 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21429 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21432 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21437 /* Break the TB to be able to sync copied instructions
21439 ctx
->base
.is_jmp
= DISAS_STOP
;
21442 /* Treat as NOP. */
21446 generate_exception_end(ctx
, EXCP_RI
);
21451 switch (extract32(ctx
->opcode
, 11, 4)) {
21456 TCGv t0
= tcg_temp_new();
21457 TCGv t1
= tcg_temp_new();
21459 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21461 switch (extract32(ctx
->opcode
, 11, 4)) {
21463 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21465 gen_store_gpr(t0
, rt
);
21468 gen_load_gpr(t1
, rt
);
21469 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21478 switch (ctx
->opcode
& 0x03) {
21480 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21484 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21489 switch (ctx
->opcode
& 0x03) {
21491 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, s
);
21495 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21500 check_cp0_enabled(ctx
);
21501 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21502 gen_cache_operation(ctx
, rt
, rs
, s
);
21508 switch (extract32(ctx
->opcode
, 11, 4)) {
21511 check_cp0_enabled(ctx
);
21512 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21516 check_cp0_enabled(ctx
);
21517 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21521 check_cp0_enabled(ctx
);
21522 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21526 /* case NM_SYNCIE */
21528 check_cp0_enabled(ctx
);
21529 /* Break the TB to be able to sync copied instructions
21531 ctx
->base
.is_jmp
= DISAS_STOP
;
21533 /* case NM_PREFE */
21535 check_cp0_enabled(ctx
);
21536 /* Treat as NOP. */
21541 check_cp0_enabled(ctx
);
21542 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21546 check_cp0_enabled(ctx
);
21547 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21551 check_cp0_enabled(ctx
);
21552 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21555 check_nms_dl_il_sl_tl_l2c(ctx
);
21556 gen_cache_operation(ctx
, rt
, rs
, s
);
21560 check_cp0_enabled(ctx
);
21561 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21565 check_cp0_enabled(ctx
);
21566 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21569 switch (extract32(ctx
->opcode
, 2, 2)) {
21573 check_cp0_enabled(ctx
);
21574 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21579 check_cp0_enabled(ctx
);
21580 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21583 generate_exception_end(ctx
, EXCP_RI
);
21588 switch (extract32(ctx
->opcode
, 2, 2)) {
21592 check_cp0_enabled(ctx
);
21593 gen_st_cond(ctx
, OPC_SCE
, rt
, rs
, s
);
21598 check_cp0_enabled(ctx
);
21599 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21602 generate_exception_end(ctx
, EXCP_RI
);
21612 int count
= extract32(ctx
->opcode
, 12, 3);
21615 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21616 extract32(ctx
->opcode
, 0, 8);
21617 TCGv va
= tcg_temp_new();
21618 TCGv t1
= tcg_temp_new();
21619 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21620 NM_P_LS_UAWM
? MO_UNALN
: 0;
21622 count
= (count
== 0) ? 8 : count
;
21623 while (counter
!= count
) {
21624 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21625 int this_offset
= offset
+ (counter
<< 2);
21627 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21629 switch (extract32(ctx
->opcode
, 11, 1)) {
21631 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21633 gen_store_gpr(t1
, this_rt
);
21634 if ((this_rt
== rs
) &&
21635 (counter
!= (count
- 1))) {
21636 /* UNPREDICTABLE */
21640 this_rt
= (rt
== 0) ? 0 : this_rt
;
21641 gen_load_gpr(t1
, this_rt
);
21642 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21653 generate_exception_end(ctx
, EXCP_RI
);
21661 TCGv t0
= tcg_temp_new();
21662 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21663 extract32(ctx
->opcode
, 1, 20) << 1;
21664 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21665 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21666 extract32(ctx
->opcode
, 21, 3));
21667 gen_load_gpr(t0
, rt
);
21668 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21669 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21675 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21676 extract32(ctx
->opcode
, 1, 24) << 1;
21678 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21680 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21683 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21688 switch (extract32(ctx
->opcode
, 12, 4)) {
21691 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21694 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21697 generate_exception_end(ctx
, EXCP_RI
);
21703 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21704 extract32(ctx
->opcode
, 1, 13) << 1;
21705 switch (extract32(ctx
->opcode
, 14, 2)) {
21708 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21711 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21712 extract32(ctx
->opcode
, 1, 13) << 1;
21713 check_cp1_enabled(ctx
);
21714 switch (extract32(ctx
->opcode
, 16, 5)) {
21716 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21719 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21724 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21725 extract32(ctx
->opcode
, 0, 1) << 13;
21727 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21732 generate_exception_end(ctx
, EXCP_RI
);
21738 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21740 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21744 if (rs
== rt
|| rt
== 0) {
21745 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21746 } else if (rs
== 0) {
21747 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21749 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21757 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21758 extract32(ctx
->opcode
, 1, 13) << 1;
21759 switch (extract32(ctx
->opcode
, 14, 2)) {
21762 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21765 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21767 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21769 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21773 if (rs
== 0 || rs
== rt
) {
21775 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21777 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21781 generate_exception_end(ctx
, EXCP_RI
);
21788 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21789 extract32(ctx
->opcode
, 1, 10) << 1;
21790 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21792 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21797 generate_exception_end(ctx
, EXCP_RI
);
21803 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21806 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21807 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
21808 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx
->opcode
));
21812 /* make sure instructions are on a halfword boundary */
21813 if (ctx
->base
.pc_next
& 0x1) {
21814 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21815 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21816 tcg_temp_free(tmp
);
21817 generate_exception_end(ctx
, EXCP_AdEL
);
21821 op
= extract32(ctx
->opcode
, 10, 6);
21824 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21827 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21828 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21831 switch (extract32(ctx
->opcode
, 3, 2)) {
21832 case NM_P16_SYSCALL
:
21833 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21834 generate_exception_end(ctx
, EXCP_SYSCALL
);
21836 generate_exception_end(ctx
, EXCP_RI
);
21840 generate_exception_end(ctx
, EXCP_BREAK
);
21843 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21844 gen_helper_do_semihosting(cpu_env
);
21846 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21847 generate_exception_end(ctx
, EXCP_RI
);
21849 generate_exception_end(ctx
, EXCP_DBp
);
21854 generate_exception_end(ctx
, EXCP_RI
);
21861 int shift
= extract32(ctx
->opcode
, 0, 3);
21863 shift
= (shift
== 0) ? 8 : shift
;
21865 switch (extract32(ctx
->opcode
, 3, 1)) {
21873 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21877 switch (ctx
->opcode
& 1) {
21879 gen_pool16c_nanomips_insn(ctx
);
21882 gen_ldxs(ctx
, rt
, rs
, rd
);
21887 switch (extract32(ctx
->opcode
, 6, 1)) {
21889 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21890 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21893 generate_exception_end(ctx
, EXCP_RI
);
21898 switch (extract32(ctx
->opcode
, 3, 1)) {
21900 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
21901 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
21903 case NM_P_ADDIURS5
:
21904 rt
= extract32(ctx
->opcode
, 5, 5);
21906 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
21907 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
21908 (extract32(ctx
->opcode
, 0, 3));
21909 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
21915 switch (ctx
->opcode
& 0x1) {
21917 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
21920 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
21925 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
21926 extract32(ctx
->opcode
, 5, 3);
21927 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
21928 extract32(ctx
->opcode
, 0, 3);
21929 rt
= decode_gpr_gpr4(rt
);
21930 rs
= decode_gpr_gpr4(rs
);
21931 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
21932 (extract32(ctx
->opcode
, 3, 1))) {
21935 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
21939 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
21942 generate_exception_end(ctx
, EXCP_RI
);
21948 int imm
= extract32(ctx
->opcode
, 0, 7);
21949 imm
= (imm
== 0x7f ? -1 : imm
);
21951 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21957 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
21958 u
= (u
== 12) ? 0xff :
21959 (u
== 13) ? 0xffff : u
;
21960 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
21964 offset
= extract32(ctx
->opcode
, 0, 2);
21965 switch (extract32(ctx
->opcode
, 2, 2)) {
21967 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
21970 rt
= decode_gpr_gpr3_src_store(
21971 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21972 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
21975 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
21978 generate_exception_end(ctx
, EXCP_RI
);
21983 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
21984 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
21986 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
21989 rt
= decode_gpr_gpr3_src_store(
21990 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
21991 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
21994 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
21997 generate_exception_end(ctx
, EXCP_RI
);
22002 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22003 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22006 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22007 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22008 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
22012 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22013 extract32(ctx
->opcode
, 5, 3);
22014 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22015 extract32(ctx
->opcode
, 0, 3);
22016 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22017 (extract32(ctx
->opcode
, 8, 1) << 2);
22018 rt
= decode_gpr_gpr4(rt
);
22019 rs
= decode_gpr_gpr4(rs
);
22020 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22024 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22025 extract32(ctx
->opcode
, 5, 3);
22026 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22027 extract32(ctx
->opcode
, 0, 3);
22028 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22029 (extract32(ctx
->opcode
, 8, 1) << 2);
22030 rt
= decode_gpr_gpr4_zero(rt
);
22031 rs
= decode_gpr_gpr4(rs
);
22032 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22035 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22036 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
22039 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22040 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22041 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
22044 rt
= decode_gpr_gpr3_src_store(
22045 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
22046 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx
->opcode
));
22047 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22048 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22051 rt
= decode_gpr_gpr3_src_store(
22052 NANOMIPS_EXTRACT_RD(ctx
->opcode
));
22053 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22054 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22057 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22058 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22059 (extract32(ctx
->opcode
, 1, 9) << 1));
22062 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22063 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22064 (extract32(ctx
->opcode
, 1, 9) << 1));
22067 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22068 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22069 (extract32(ctx
->opcode
, 1, 6) << 1));
22072 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22073 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22074 (extract32(ctx
->opcode
, 1, 6) << 1));
22077 switch (ctx
->opcode
& 0xf) {
22080 switch (extract32(ctx
->opcode
, 4, 1)) {
22082 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22083 extract32(ctx
->opcode
, 5, 5), 0, 0);
22086 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22087 extract32(ctx
->opcode
, 5, 5), 31, 0);
22094 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22095 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22096 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22097 extract32(ctx
->opcode
, 0, 4) << 1);
22104 int count
= extract32(ctx
->opcode
, 0, 4);
22105 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22107 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22108 switch (extract32(ctx
->opcode
, 8, 1)) {
22110 gen_save(ctx
, rt
, count
, 0, u
);
22112 case NM_RESTORE_JRC16
:
22113 gen_restore(ctx
, rt
, count
, 0, u
);
22114 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22123 static const int gpr2reg1
[] = {4, 5, 6, 7};
22124 static const int gpr2reg2
[] = {5, 6, 7, 8};
22126 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22127 extract32(ctx
->opcode
, 8, 1);
22128 int r1
= gpr2reg1
[rd2
];
22129 int r2
= gpr2reg2
[rd2
];
22130 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22131 extract32(ctx
->opcode
, 0, 3);
22132 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22133 extract32(ctx
->opcode
, 5, 3);
22134 TCGv t0
= tcg_temp_new();
22135 TCGv t1
= tcg_temp_new();
22136 if (op
== NM_MOVEP
) {
22139 rs
= decode_gpr_gpr4_zero(r3
);
22140 rt
= decode_gpr_gpr4_zero(r4
);
22142 rd
= decode_gpr_gpr4(r3
);
22143 re
= decode_gpr_gpr4(r4
);
22147 gen_load_gpr(t0
, rs
);
22148 gen_load_gpr(t1
, rt
);
22149 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22150 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22156 return decode_nanomips_32_48_opc(env
, ctx
);
22163 /* SmartMIPS extension to MIPS32 */
22165 #if defined(TARGET_MIPS64)
22167 /* MDMX extension to MIPS64 */
22171 /* MIPSDSP functions. */
22172 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
22173 int rd
, int base
, int offset
)
22178 t0
= tcg_temp_new();
22181 gen_load_gpr(t0
, offset
);
22182 } else if (offset
== 0) {
22183 gen_load_gpr(t0
, base
);
22185 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
22190 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
22191 gen_store_gpr(t0
, rd
);
22194 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
22195 gen_store_gpr(t0
, rd
);
22198 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
22199 gen_store_gpr(t0
, rd
);
22201 #if defined(TARGET_MIPS64)
22203 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
22204 gen_store_gpr(t0
, rd
);
22211 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22212 int ret
, int v1
, int v2
)
22218 /* Treat as NOP. */
22222 v1_t
= tcg_temp_new();
22223 v2_t
= tcg_temp_new();
22225 gen_load_gpr(v1_t
, v1
);
22226 gen_load_gpr(v2_t
, v2
);
22229 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22230 case OPC_MULT_G_2E
:
22234 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22236 case OPC_ADDUH_R_QB
:
22237 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22240 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22242 case OPC_ADDQH_R_PH
:
22243 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22246 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22248 case OPC_ADDQH_R_W
:
22249 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22252 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22254 case OPC_SUBUH_R_QB
:
22255 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22258 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22260 case OPC_SUBQH_R_PH
:
22261 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22264 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22266 case OPC_SUBQH_R_W
:
22267 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22271 case OPC_ABSQ_S_PH_DSP
:
22273 case OPC_ABSQ_S_QB
:
22275 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22277 case OPC_ABSQ_S_PH
:
22279 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22283 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22285 case OPC_PRECEQ_W_PHL
:
22287 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22288 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22290 case OPC_PRECEQ_W_PHR
:
22292 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22293 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22294 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22296 case OPC_PRECEQU_PH_QBL
:
22298 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22300 case OPC_PRECEQU_PH_QBR
:
22302 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22304 case OPC_PRECEQU_PH_QBLA
:
22306 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22308 case OPC_PRECEQU_PH_QBRA
:
22310 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22312 case OPC_PRECEU_PH_QBL
:
22314 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22316 case OPC_PRECEU_PH_QBR
:
22318 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22320 case OPC_PRECEU_PH_QBLA
:
22322 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22324 case OPC_PRECEU_PH_QBRA
:
22326 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22330 case OPC_ADDU_QB_DSP
:
22334 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22336 case OPC_ADDQ_S_PH
:
22338 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22342 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22346 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22348 case OPC_ADDU_S_QB
:
22350 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22354 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22356 case OPC_ADDU_S_PH
:
22358 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22362 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22364 case OPC_SUBQ_S_PH
:
22366 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22370 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22374 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22376 case OPC_SUBU_S_QB
:
22378 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22382 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22384 case OPC_SUBU_S_PH
:
22386 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22390 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22394 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22398 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22400 case OPC_RADDU_W_QB
:
22402 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22406 case OPC_CMPU_EQ_QB_DSP
:
22408 case OPC_PRECR_QB_PH
:
22410 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22412 case OPC_PRECRQ_QB_PH
:
22414 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22416 case OPC_PRECR_SRA_PH_W
:
22419 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22420 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22422 tcg_temp_free_i32(sa_t
);
22425 case OPC_PRECR_SRA_R_PH_W
:
22428 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22429 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22431 tcg_temp_free_i32(sa_t
);
22434 case OPC_PRECRQ_PH_W
:
22436 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22438 case OPC_PRECRQ_RS_PH_W
:
22440 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22442 case OPC_PRECRQU_S_QB_PH
:
22444 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22448 #ifdef TARGET_MIPS64
22449 case OPC_ABSQ_S_QH_DSP
:
22451 case OPC_PRECEQ_L_PWL
:
22453 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22455 case OPC_PRECEQ_L_PWR
:
22457 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22459 case OPC_PRECEQ_PW_QHL
:
22461 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22463 case OPC_PRECEQ_PW_QHR
:
22465 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22467 case OPC_PRECEQ_PW_QHLA
:
22469 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22471 case OPC_PRECEQ_PW_QHRA
:
22473 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22475 case OPC_PRECEQU_QH_OBL
:
22477 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22479 case OPC_PRECEQU_QH_OBR
:
22481 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22483 case OPC_PRECEQU_QH_OBLA
:
22485 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22487 case OPC_PRECEQU_QH_OBRA
:
22489 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22491 case OPC_PRECEU_QH_OBL
:
22493 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22495 case OPC_PRECEU_QH_OBR
:
22497 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22499 case OPC_PRECEU_QH_OBLA
:
22501 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22503 case OPC_PRECEU_QH_OBRA
:
22505 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22507 case OPC_ABSQ_S_OB
:
22509 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22511 case OPC_ABSQ_S_PW
:
22513 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22515 case OPC_ABSQ_S_QH
:
22517 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22521 case OPC_ADDU_OB_DSP
:
22523 case OPC_RADDU_L_OB
:
22525 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22529 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22531 case OPC_SUBQ_S_PW
:
22533 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22537 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22539 case OPC_SUBQ_S_QH
:
22541 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22545 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22547 case OPC_SUBU_S_OB
:
22549 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22553 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22555 case OPC_SUBU_S_QH
:
22557 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22561 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22563 case OPC_SUBUH_R_OB
:
22565 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22569 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22571 case OPC_ADDQ_S_PW
:
22573 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22577 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22579 case OPC_ADDQ_S_QH
:
22581 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22585 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22587 case OPC_ADDU_S_OB
:
22589 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22593 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22595 case OPC_ADDU_S_QH
:
22597 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22601 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22603 case OPC_ADDUH_R_OB
:
22605 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22609 case OPC_CMPU_EQ_OB_DSP
:
22611 case OPC_PRECR_OB_QH
:
22613 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22615 case OPC_PRECR_SRA_QH_PW
:
22618 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22619 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22620 tcg_temp_free_i32(ret_t
);
22623 case OPC_PRECR_SRA_R_QH_PW
:
22626 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22627 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22628 tcg_temp_free_i32(sa_v
);
22631 case OPC_PRECRQ_OB_QH
:
22633 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22635 case OPC_PRECRQ_PW_L
:
22637 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22639 case OPC_PRECRQ_QH_PW
:
22641 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22643 case OPC_PRECRQ_RS_QH_PW
:
22645 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22647 case OPC_PRECRQU_S_OB_QH
:
22649 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22656 tcg_temp_free(v1_t
);
22657 tcg_temp_free(v2_t
);
22660 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22661 int ret
, int v1
, int v2
)
22669 /* Treat as NOP. */
22673 t0
= tcg_temp_new();
22674 v1_t
= tcg_temp_new();
22675 v2_t
= tcg_temp_new();
22677 tcg_gen_movi_tl(t0
, v1
);
22678 gen_load_gpr(v1_t
, v1
);
22679 gen_load_gpr(v2_t
, v2
);
22682 case OPC_SHLL_QB_DSP
:
22684 op2
= MASK_SHLL_QB(ctx
->opcode
);
22688 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22692 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22696 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22700 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22702 case OPC_SHLL_S_PH
:
22704 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22706 case OPC_SHLLV_S_PH
:
22708 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22712 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22714 case OPC_SHLLV_S_W
:
22716 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22720 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22724 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22728 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22732 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22736 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22738 case OPC_SHRA_R_QB
:
22740 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22744 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22746 case OPC_SHRAV_R_QB
:
22748 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22752 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22754 case OPC_SHRA_R_PH
:
22756 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22760 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22762 case OPC_SHRAV_R_PH
:
22764 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22768 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22770 case OPC_SHRAV_R_W
:
22772 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22774 default: /* Invalid */
22775 MIPS_INVAL("MASK SHLL.QB");
22776 generate_exception_end(ctx
, EXCP_RI
);
22781 #ifdef TARGET_MIPS64
22782 case OPC_SHLL_OB_DSP
:
22783 op2
= MASK_SHLL_OB(ctx
->opcode
);
22787 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22791 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22793 case OPC_SHLL_S_PW
:
22795 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22797 case OPC_SHLLV_S_PW
:
22799 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22803 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22807 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22811 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22815 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22817 case OPC_SHLL_S_QH
:
22819 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22821 case OPC_SHLLV_S_QH
:
22823 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22827 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22831 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22833 case OPC_SHRA_R_OB
:
22835 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22837 case OPC_SHRAV_R_OB
:
22839 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22843 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22847 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22849 case OPC_SHRA_R_PW
:
22851 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22853 case OPC_SHRAV_R_PW
:
22855 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22859 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22863 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22865 case OPC_SHRA_R_QH
:
22867 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22869 case OPC_SHRAV_R_QH
:
22871 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22875 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22879 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22883 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22887 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22889 default: /* Invalid */
22890 MIPS_INVAL("MASK SHLL.OB");
22891 generate_exception_end(ctx
, EXCP_RI
);
22899 tcg_temp_free(v1_t
);
22900 tcg_temp_free(v2_t
);
22903 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22904 int ret
, int v1
, int v2
, int check_ret
)
22910 if ((ret
== 0) && (check_ret
== 1)) {
22911 /* Treat as NOP. */
22915 t0
= tcg_temp_new_i32();
22916 v1_t
= tcg_temp_new();
22917 v2_t
= tcg_temp_new();
22919 tcg_gen_movi_i32(t0
, ret
);
22920 gen_load_gpr(v1_t
, v1
);
22921 gen_load_gpr(v2_t
, v2
);
22924 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22925 * the same mask and op1. */
22926 case OPC_MULT_G_2E
:
22930 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22933 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22936 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22938 case OPC_MULQ_RS_W
:
22939 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22943 case OPC_DPA_W_PH_DSP
:
22945 case OPC_DPAU_H_QBL
:
22947 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22949 case OPC_DPAU_H_QBR
:
22951 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22953 case OPC_DPSU_H_QBL
:
22955 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
22957 case OPC_DPSU_H_QBR
:
22959 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
22963 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22965 case OPC_DPAX_W_PH
:
22967 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22969 case OPC_DPAQ_S_W_PH
:
22971 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22973 case OPC_DPAQX_S_W_PH
:
22975 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22977 case OPC_DPAQX_SA_W_PH
:
22979 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22983 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22985 case OPC_DPSX_W_PH
:
22987 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22989 case OPC_DPSQ_S_W_PH
:
22991 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22993 case OPC_DPSQX_S_W_PH
:
22995 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
22997 case OPC_DPSQX_SA_W_PH
:
22999 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23001 case OPC_MULSAQ_S_W_PH
:
23003 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23005 case OPC_DPAQ_SA_L_W
:
23007 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23009 case OPC_DPSQ_SA_L_W
:
23011 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23013 case OPC_MAQ_S_W_PHL
:
23015 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23017 case OPC_MAQ_S_W_PHR
:
23019 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23021 case OPC_MAQ_SA_W_PHL
:
23023 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23025 case OPC_MAQ_SA_W_PHR
:
23027 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23029 case OPC_MULSA_W_PH
:
23031 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23035 #ifdef TARGET_MIPS64
23036 case OPC_DPAQ_W_QH_DSP
:
23038 int ac
= ret
& 0x03;
23039 tcg_gen_movi_i32(t0
, ac
);
23044 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
23048 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
23052 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23056 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23060 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23062 case OPC_DPAQ_S_W_QH
:
23064 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23066 case OPC_DPAQ_SA_L_PW
:
23068 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23070 case OPC_DPAU_H_OBL
:
23072 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23074 case OPC_DPAU_H_OBR
:
23076 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23080 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23082 case OPC_DPSQ_S_W_QH
:
23084 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23086 case OPC_DPSQ_SA_L_PW
:
23088 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23090 case OPC_DPSU_H_OBL
:
23092 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23094 case OPC_DPSU_H_OBR
:
23096 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23098 case OPC_MAQ_S_L_PWL
:
23100 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23102 case OPC_MAQ_S_L_PWR
:
23104 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23106 case OPC_MAQ_S_W_QHLL
:
23108 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23110 case OPC_MAQ_SA_W_QHLL
:
23112 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23114 case OPC_MAQ_S_W_QHLR
:
23116 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23118 case OPC_MAQ_SA_W_QHLR
:
23120 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23122 case OPC_MAQ_S_W_QHRL
:
23124 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23126 case OPC_MAQ_SA_W_QHRL
:
23128 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23130 case OPC_MAQ_S_W_QHRR
:
23132 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23134 case OPC_MAQ_SA_W_QHRR
:
23136 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23138 case OPC_MULSAQ_S_L_PW
:
23140 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23142 case OPC_MULSAQ_S_W_QH
:
23144 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23150 case OPC_ADDU_QB_DSP
:
23152 case OPC_MULEU_S_PH_QBL
:
23154 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23156 case OPC_MULEU_S_PH_QBR
:
23158 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23160 case OPC_MULQ_RS_PH
:
23162 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23164 case OPC_MULEQ_S_W_PHL
:
23166 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23168 case OPC_MULEQ_S_W_PHR
:
23170 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23172 case OPC_MULQ_S_PH
:
23174 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23178 #ifdef TARGET_MIPS64
23179 case OPC_ADDU_OB_DSP
:
23181 case OPC_MULEQ_S_PW_QHL
:
23183 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23185 case OPC_MULEQ_S_PW_QHR
:
23187 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23189 case OPC_MULEU_S_QH_OBL
:
23191 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23193 case OPC_MULEU_S_QH_OBR
:
23195 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23197 case OPC_MULQ_RS_QH
:
23199 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23206 tcg_temp_free_i32(t0
);
23207 tcg_temp_free(v1_t
);
23208 tcg_temp_free(v2_t
);
23211 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23219 /* Treat as NOP. */
23223 t0
= tcg_temp_new();
23224 val_t
= tcg_temp_new();
23225 gen_load_gpr(val_t
, val
);
23228 case OPC_ABSQ_S_PH_DSP
:
23232 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
23237 target_long result
;
23238 imm
= (ctx
->opcode
>> 16) & 0xFF;
23239 result
= (uint32_t)imm
<< 24 |
23240 (uint32_t)imm
<< 16 |
23241 (uint32_t)imm
<< 8 |
23243 result
= (int32_t)result
;
23244 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
23249 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23250 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23251 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23252 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23253 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23254 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23259 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23260 imm
= (int16_t)(imm
<< 6) >> 6;
23261 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23262 (target_long
)((int32_t)imm
<< 16 | \
23268 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23269 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23270 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23271 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23275 #ifdef TARGET_MIPS64
23276 case OPC_ABSQ_S_QH_DSP
:
23283 imm
= (ctx
->opcode
>> 16) & 0xFF;
23284 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23285 temp
= (temp
<< 16) | temp
;
23286 temp
= (temp
<< 32) | temp
;
23287 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23295 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23296 imm
= (int16_t)(imm
<< 6) >> 6;
23297 temp
= ((target_long
)imm
<< 32) \
23298 | ((target_long
)imm
& 0xFFFFFFFF);
23299 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23307 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23308 imm
= (int16_t)(imm
<< 6) >> 6;
23310 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23311 ((uint64_t)(uint16_t)imm
<< 32) |
23312 ((uint64_t)(uint16_t)imm
<< 16) |
23313 (uint64_t)(uint16_t)imm
;
23314 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23319 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23320 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23321 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23322 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23323 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23324 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23325 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23329 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23330 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23331 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23335 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23336 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23337 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23338 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23339 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23346 tcg_temp_free(val_t
);
23349 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23350 uint32_t op1
, uint32_t op2
,
23351 int ret
, int v1
, int v2
, int check_ret
)
23357 if ((ret
== 0) && (check_ret
== 1)) {
23358 /* Treat as NOP. */
23362 t1
= tcg_temp_new();
23363 v1_t
= tcg_temp_new();
23364 v2_t
= tcg_temp_new();
23366 gen_load_gpr(v1_t
, v1
);
23367 gen_load_gpr(v2_t
, v2
);
23370 case OPC_CMPU_EQ_QB_DSP
:
23372 case OPC_CMPU_EQ_QB
:
23374 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23376 case OPC_CMPU_LT_QB
:
23378 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23380 case OPC_CMPU_LE_QB
:
23382 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23384 case OPC_CMPGU_EQ_QB
:
23386 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23388 case OPC_CMPGU_LT_QB
:
23390 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23392 case OPC_CMPGU_LE_QB
:
23394 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23396 case OPC_CMPGDU_EQ_QB
:
23398 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23399 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23400 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23401 tcg_gen_shli_tl(t1
, t1
, 24);
23402 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23404 case OPC_CMPGDU_LT_QB
:
23406 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23407 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23408 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23409 tcg_gen_shli_tl(t1
, t1
, 24);
23410 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23412 case OPC_CMPGDU_LE_QB
:
23414 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23415 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23416 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23417 tcg_gen_shli_tl(t1
, t1
, 24);
23418 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23420 case OPC_CMP_EQ_PH
:
23422 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23424 case OPC_CMP_LT_PH
:
23426 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23428 case OPC_CMP_LE_PH
:
23430 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23434 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23438 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23440 case OPC_PACKRL_PH
:
23442 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23446 #ifdef TARGET_MIPS64
23447 case OPC_CMPU_EQ_OB_DSP
:
23449 case OPC_CMP_EQ_PW
:
23451 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23453 case OPC_CMP_LT_PW
:
23455 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23457 case OPC_CMP_LE_PW
:
23459 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23461 case OPC_CMP_EQ_QH
:
23463 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23465 case OPC_CMP_LT_QH
:
23467 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23469 case OPC_CMP_LE_QH
:
23471 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23473 case OPC_CMPGDU_EQ_OB
:
23475 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23477 case OPC_CMPGDU_LT_OB
:
23479 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23481 case OPC_CMPGDU_LE_OB
:
23483 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23485 case OPC_CMPGU_EQ_OB
:
23487 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23489 case OPC_CMPGU_LT_OB
:
23491 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23493 case OPC_CMPGU_LE_OB
:
23495 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23497 case OPC_CMPU_EQ_OB
:
23499 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23501 case OPC_CMPU_LT_OB
:
23503 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23505 case OPC_CMPU_LE_OB
:
23507 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23509 case OPC_PACKRL_PW
:
23511 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23515 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23519 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23523 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23531 tcg_temp_free(v1_t
);
23532 tcg_temp_free(v2_t
);
23535 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23536 uint32_t op1
, int rt
, int rs
, int sa
)
23543 /* Treat as NOP. */
23547 t0
= tcg_temp_new();
23548 gen_load_gpr(t0
, rs
);
23551 case OPC_APPEND_DSP
:
23552 switch (MASK_APPEND(ctx
->opcode
)) {
23555 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23557 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23561 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23562 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23563 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23564 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23566 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23570 if (sa
!= 0 && sa
!= 2) {
23571 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23572 tcg_gen_ext32u_tl(t0
, t0
);
23573 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23574 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23576 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23578 default: /* Invalid */
23579 MIPS_INVAL("MASK APPEND");
23580 generate_exception_end(ctx
, EXCP_RI
);
23584 #ifdef TARGET_MIPS64
23585 case OPC_DAPPEND_DSP
:
23586 switch (MASK_DAPPEND(ctx
->opcode
)) {
23589 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23593 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23594 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23595 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23599 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23600 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23601 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23606 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23607 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23608 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23609 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23612 default: /* Invalid */
23613 MIPS_INVAL("MASK DAPPEND");
23614 generate_exception_end(ctx
, EXCP_RI
);
23623 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23624 int ret
, int v1
, int v2
, int check_ret
)
23633 if ((ret
== 0) && (check_ret
== 1)) {
23634 /* Treat as NOP. */
23638 t0
= tcg_temp_new();
23639 t1
= tcg_temp_new();
23640 v1_t
= tcg_temp_new();
23641 v2_t
= tcg_temp_new();
23643 gen_load_gpr(v1_t
, v1
);
23644 gen_load_gpr(v2_t
, v2
);
23647 case OPC_EXTR_W_DSP
:
23651 tcg_gen_movi_tl(t0
, v2
);
23652 tcg_gen_movi_tl(t1
, v1
);
23653 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23656 tcg_gen_movi_tl(t0
, v2
);
23657 tcg_gen_movi_tl(t1
, v1
);
23658 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23660 case OPC_EXTR_RS_W
:
23661 tcg_gen_movi_tl(t0
, v2
);
23662 tcg_gen_movi_tl(t1
, v1
);
23663 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23666 tcg_gen_movi_tl(t0
, v2
);
23667 tcg_gen_movi_tl(t1
, v1
);
23668 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23670 case OPC_EXTRV_S_H
:
23671 tcg_gen_movi_tl(t0
, v2
);
23672 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23675 tcg_gen_movi_tl(t0
, v2
);
23676 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23678 case OPC_EXTRV_R_W
:
23679 tcg_gen_movi_tl(t0
, v2
);
23680 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23682 case OPC_EXTRV_RS_W
:
23683 tcg_gen_movi_tl(t0
, v2
);
23684 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23687 tcg_gen_movi_tl(t0
, v2
);
23688 tcg_gen_movi_tl(t1
, v1
);
23689 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23692 tcg_gen_movi_tl(t0
, v2
);
23693 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23696 tcg_gen_movi_tl(t0
, v2
);
23697 tcg_gen_movi_tl(t1
, v1
);
23698 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23701 tcg_gen_movi_tl(t0
, v2
);
23702 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23705 imm
= (ctx
->opcode
>> 20) & 0x3F;
23706 tcg_gen_movi_tl(t0
, ret
);
23707 tcg_gen_movi_tl(t1
, imm
);
23708 gen_helper_shilo(t0
, t1
, cpu_env
);
23711 tcg_gen_movi_tl(t0
, ret
);
23712 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23715 tcg_gen_movi_tl(t0
, ret
);
23716 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23719 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23720 tcg_gen_movi_tl(t0
, imm
);
23721 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23724 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23725 tcg_gen_movi_tl(t0
, imm
);
23726 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23730 #ifdef TARGET_MIPS64
23731 case OPC_DEXTR_W_DSP
:
23735 tcg_gen_movi_tl(t0
, ret
);
23736 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23740 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23741 int ac
= (ctx
->opcode
>> 11) & 0x03;
23742 tcg_gen_movi_tl(t0
, shift
);
23743 tcg_gen_movi_tl(t1
, ac
);
23744 gen_helper_dshilo(t0
, t1
, cpu_env
);
23749 int ac
= (ctx
->opcode
>> 11) & 0x03;
23750 tcg_gen_movi_tl(t0
, ac
);
23751 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23755 tcg_gen_movi_tl(t0
, v2
);
23756 tcg_gen_movi_tl(t1
, v1
);
23758 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23761 tcg_gen_movi_tl(t0
, v2
);
23762 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23765 tcg_gen_movi_tl(t0
, v2
);
23766 tcg_gen_movi_tl(t1
, v1
);
23767 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23770 tcg_gen_movi_tl(t0
, v2
);
23771 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23774 tcg_gen_movi_tl(t0
, v2
);
23775 tcg_gen_movi_tl(t1
, v1
);
23776 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23778 case OPC_DEXTR_R_L
:
23779 tcg_gen_movi_tl(t0
, v2
);
23780 tcg_gen_movi_tl(t1
, v1
);
23781 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23783 case OPC_DEXTR_RS_L
:
23784 tcg_gen_movi_tl(t0
, v2
);
23785 tcg_gen_movi_tl(t1
, v1
);
23786 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23789 tcg_gen_movi_tl(t0
, v2
);
23790 tcg_gen_movi_tl(t1
, v1
);
23791 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23793 case OPC_DEXTR_R_W
:
23794 tcg_gen_movi_tl(t0
, v2
);
23795 tcg_gen_movi_tl(t1
, v1
);
23796 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23798 case OPC_DEXTR_RS_W
:
23799 tcg_gen_movi_tl(t0
, v2
);
23800 tcg_gen_movi_tl(t1
, v1
);
23801 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23803 case OPC_DEXTR_S_H
:
23804 tcg_gen_movi_tl(t0
, v2
);
23805 tcg_gen_movi_tl(t1
, v1
);
23806 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23808 case OPC_DEXTRV_S_H
:
23809 tcg_gen_movi_tl(t0
, v2
);
23810 tcg_gen_movi_tl(t1
, v1
);
23811 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23814 tcg_gen_movi_tl(t0
, v2
);
23815 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23817 case OPC_DEXTRV_R_L
:
23818 tcg_gen_movi_tl(t0
, v2
);
23819 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23821 case OPC_DEXTRV_RS_L
:
23822 tcg_gen_movi_tl(t0
, v2
);
23823 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23826 tcg_gen_movi_tl(t0
, v2
);
23827 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23829 case OPC_DEXTRV_R_W
:
23830 tcg_gen_movi_tl(t0
, v2
);
23831 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23833 case OPC_DEXTRV_RS_W
:
23834 tcg_gen_movi_tl(t0
, v2
);
23835 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23844 tcg_temp_free(v1_t
);
23845 tcg_temp_free(v2_t
);
23848 /* End MIPSDSP functions. */
23850 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23852 int rs
, rt
, rd
, sa
;
23855 rs
= (ctx
->opcode
>> 21) & 0x1f;
23856 rt
= (ctx
->opcode
>> 16) & 0x1f;
23857 rd
= (ctx
->opcode
>> 11) & 0x1f;
23858 sa
= (ctx
->opcode
>> 6) & 0x1f;
23860 op1
= MASK_SPECIAL(ctx
->opcode
);
23863 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23869 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23879 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23882 MIPS_INVAL("special_r6 muldiv");
23883 generate_exception_end(ctx
, EXCP_RI
);
23889 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23893 if (rt
== 0 && sa
== 1) {
23894 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23895 We need additionally to check other fields */
23896 gen_cl(ctx
, op1
, rd
, rs
);
23898 generate_exception_end(ctx
, EXCP_RI
);
23902 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
23903 gen_helper_do_semihosting(cpu_env
);
23905 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
23906 generate_exception_end(ctx
, EXCP_RI
);
23908 generate_exception_end(ctx
, EXCP_DBp
);
23912 #if defined(TARGET_MIPS64)
23914 check_mips_64(ctx
);
23915 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23919 if (rt
== 0 && sa
== 1) {
23920 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
23921 We need additionally to check other fields */
23922 check_mips_64(ctx
);
23923 gen_cl(ctx
, op1
, rd
, rs
);
23925 generate_exception_end(ctx
, EXCP_RI
);
23933 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23943 check_mips_64(ctx
);
23944 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23947 MIPS_INVAL("special_r6 muldiv");
23948 generate_exception_end(ctx
, EXCP_RI
);
23953 default: /* Invalid */
23954 MIPS_INVAL("special_r6");
23955 generate_exception_end(ctx
, EXCP_RI
);
23960 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
23962 int rs
= extract32(ctx
->opcode
, 21, 5);
23963 int rt
= extract32(ctx
->opcode
, 16, 5);
23964 int rd
= extract32(ctx
->opcode
, 11, 5);
23965 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
23968 case OPC_MOVN
: /* Conditional move */
23970 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23972 case OPC_MFHI
: /* Move from HI/LO */
23974 gen_HILO(ctx
, op1
, 0, rd
);
23977 case OPC_MTLO
: /* Move to HI/LO */
23978 gen_HILO(ctx
, op1
, 0, rs
);
23982 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
23986 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23988 #if defined(TARGET_MIPS64)
23993 check_insn_opc_user_only(ctx
, INSN_R5900
);
23994 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
23998 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
24000 default: /* Invalid */
24001 MIPS_INVAL("special_tx79");
24002 generate_exception_end(ctx
, EXCP_RI
);
24007 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24009 int rs
, rt
, rd
, sa
;
24012 rs
= (ctx
->opcode
>> 21) & 0x1f;
24013 rt
= (ctx
->opcode
>> 16) & 0x1f;
24014 rd
= (ctx
->opcode
>> 11) & 0x1f;
24015 sa
= (ctx
->opcode
>> 6) & 0x1f;
24017 op1
= MASK_SPECIAL(ctx
->opcode
);
24019 case OPC_MOVN
: /* Conditional move */
24021 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
24022 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
24023 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24025 case OPC_MFHI
: /* Move from HI/LO */
24027 gen_HILO(ctx
, op1
, rs
& 3, rd
);
24030 case OPC_MTLO
: /* Move to HI/LO */
24031 gen_HILO(ctx
, op1
, rd
& 3, rs
);
24034 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
24035 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
24036 check_cp1_enabled(ctx
);
24037 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
24038 (ctx
->opcode
>> 16) & 1);
24040 generate_exception_err(ctx
, EXCP_CpU
, 1);
24046 check_insn(ctx
, INSN_VR54XX
);
24047 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
24048 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
24050 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24055 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24057 #if defined(TARGET_MIPS64)
24062 check_insn(ctx
, ISA_MIPS3
);
24063 check_mips_64(ctx
);
24064 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24068 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24071 #ifdef MIPS_STRICT_STANDARD
24072 MIPS_INVAL("SPIM");
24073 generate_exception_end(ctx
, EXCP_RI
);
24075 /* Implemented as RI exception for now. */
24076 MIPS_INVAL("spim (unofficial)");
24077 generate_exception_end(ctx
, EXCP_RI
);
24080 default: /* Invalid */
24081 MIPS_INVAL("special_legacy");
24082 generate_exception_end(ctx
, EXCP_RI
);
24087 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
24089 int rs
, rt
, rd
, sa
;
24092 rs
= (ctx
->opcode
>> 21) & 0x1f;
24093 rt
= (ctx
->opcode
>> 16) & 0x1f;
24094 rd
= (ctx
->opcode
>> 11) & 0x1f;
24095 sa
= (ctx
->opcode
>> 6) & 0x1f;
24097 op1
= MASK_SPECIAL(ctx
->opcode
);
24099 case OPC_SLL
: /* Shift with immediate */
24100 if (sa
== 5 && rd
== 0 &&
24101 rs
== 0 && rt
== 0) { /* PAUSE */
24102 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
24103 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
24104 generate_exception_end(ctx
, EXCP_RI
);
24110 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24113 switch ((ctx
->opcode
>> 21) & 0x1f) {
24115 /* rotr is decoded as srl on non-R2 CPUs */
24116 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24121 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24124 generate_exception_end(ctx
, EXCP_RI
);
24132 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24134 case OPC_SLLV
: /* Shifts */
24136 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24139 switch ((ctx
->opcode
>> 6) & 0x1f) {
24141 /* rotrv is decoded as srlv on non-R2 CPUs */
24142 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24147 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24150 generate_exception_end(ctx
, EXCP_RI
);
24154 case OPC_SLT
: /* Set on less than */
24156 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24158 case OPC_AND
: /* Logic*/
24162 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24165 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24167 case OPC_TGE
: /* Traps */
24173 check_insn(ctx
, ISA_MIPS2
);
24174 gen_trap(ctx
, op1
, rs
, rt
, -1);
24176 case OPC_LSA
: /* OPC_PMON */
24177 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24178 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24179 decode_opc_special_r6(env
, ctx
);
24181 /* Pmon entry point, also R4010 selsl */
24182 #ifdef MIPS_STRICT_STANDARD
24183 MIPS_INVAL("PMON / selsl");
24184 generate_exception_end(ctx
, EXCP_RI
);
24186 gen_helper_0e0i(pmon
, sa
);
24191 generate_exception_end(ctx
, EXCP_SYSCALL
);
24194 generate_exception_end(ctx
, EXCP_BREAK
);
24197 check_insn(ctx
, ISA_MIPS2
);
24198 gen_sync(extract32(ctx
->opcode
, 6, 5));
24201 #if defined(TARGET_MIPS64)
24202 /* MIPS64 specific opcodes */
24207 check_insn(ctx
, ISA_MIPS3
);
24208 check_mips_64(ctx
);
24209 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24212 switch ((ctx
->opcode
>> 21) & 0x1f) {
24214 /* drotr is decoded as dsrl on non-R2 CPUs */
24215 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24220 check_insn(ctx
, ISA_MIPS3
);
24221 check_mips_64(ctx
);
24222 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24225 generate_exception_end(ctx
, EXCP_RI
);
24230 switch ((ctx
->opcode
>> 21) & 0x1f) {
24232 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24233 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24238 check_insn(ctx
, ISA_MIPS3
);
24239 check_mips_64(ctx
);
24240 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24243 generate_exception_end(ctx
, EXCP_RI
);
24251 check_insn(ctx
, ISA_MIPS3
);
24252 check_mips_64(ctx
);
24253 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24257 check_insn(ctx
, ISA_MIPS3
);
24258 check_mips_64(ctx
);
24259 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24262 switch ((ctx
->opcode
>> 6) & 0x1f) {
24264 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24265 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24270 check_insn(ctx
, ISA_MIPS3
);
24271 check_mips_64(ctx
);
24272 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24275 generate_exception_end(ctx
, EXCP_RI
);
24280 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24281 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24282 decode_opc_special_r6(env
, ctx
);
24287 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24288 decode_opc_special_r6(env
, ctx
);
24289 } else if (ctx
->insn_flags
& INSN_R5900
) {
24290 decode_opc_special_tx79(env
, ctx
);
24292 decode_opc_special_legacy(env
, ctx
);
24298 #if !defined(TARGET_MIPS64)
24300 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
24301 #define MXU_APTN1_A 0
24302 #define MXU_APTN1_S 1
24304 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
24305 #define MXU_APTN2_AA 0
24306 #define MXU_APTN2_AS 1
24307 #define MXU_APTN2_SA 2
24308 #define MXU_APTN2_SS 3
24310 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
24311 #define MXU_EPTN2_AA 0
24312 #define MXU_EPTN2_AS 1
24313 #define MXU_EPTN2_SA 2
24314 #define MXU_EPTN2_SS 3
24316 /* MXU operand getting pattern 'optn2' */
24317 #define MXU_OPTN2_PTN0 0
24318 #define MXU_OPTN2_PTN1 1
24319 #define MXU_OPTN2_PTN2 2
24320 #define MXU_OPTN2_PTN3 3
24321 /* alternative naming scheme for 'optn2' */
24322 #define MXU_OPTN2_WW 0
24323 #define MXU_OPTN2_LW 1
24324 #define MXU_OPTN2_HW 2
24325 #define MXU_OPTN2_XW 3
24327 /* MXU operand getting pattern 'optn3' */
24328 #define MXU_OPTN3_PTN0 0
24329 #define MXU_OPTN3_PTN1 1
24330 #define MXU_OPTN3_PTN2 2
24331 #define MXU_OPTN3_PTN3 3
24332 #define MXU_OPTN3_PTN4 4
24333 #define MXU_OPTN3_PTN5 5
24334 #define MXU_OPTN3_PTN6 6
24335 #define MXU_OPTN3_PTN7 7
24339 * S32I2M XRa, rb - Register move from GRF to XRF
/*
 * S32I2M XRa, Rb: copy GPR Rb into MXU register file entry XRa.
 * XRa == 16 selects the MXU control register (gen_store_mxu_cr);
 * smaller indices go to the MXU general registers.
 * NOTE(review): the guard selecting the gen_store_mxu_gpr path
 * (presumably an "if (XRa <= 15) {" before it) was lost in this
 * extraction — confirm against the full source.
 */
24341 static void gen_mxu_s32i2m(DisasContext
*ctx
)
24346 t0
= tcg_temp_new();
/* Decode: XRa from bits [10:6], Rb from bits [20:16]. */
24348 XRa
= extract32(ctx
->opcode
, 6, 5);
24349 Rb
= extract32(ctx
->opcode
, 16, 5);
24351 gen_load_gpr(t0
, Rb
);
24353 gen_store_mxu_gpr(t0
, XRa
);
24354 } else if (XRa
== 16) {
24355 gen_store_mxu_cr(t0
);
24362 * S32M2I XRa, rb - Register move from XRF to GRF
/*
 * S32M2I XRa, Rb: copy MXU register XRa into GPR Rb — the inverse of
 * S32I2M.  XRa == 16 reads the MXU control register (gen_load_mxu_cr).
 * NOTE(review): the guard selecting the gen_load_mxu_gpr path
 * (presumably "if (XRa <= 15) {") was lost in this extraction —
 * confirm against the full source.
 */
24364 static void gen_mxu_s32m2i(DisasContext
*ctx
)
24369 t0
= tcg_temp_new();
/* Decode: XRa from bits [10:6], Rb from bits [20:16]. */
24371 XRa
= extract32(ctx
->opcode
, 6, 5);
24372 Rb
= extract32(ctx
->opcode
, 16, 5);
24375 gen_load_mxu_gpr(t0
, XRa
);
24376 } else if (XRa
== 16) {
24377 gen_load_mxu_cr(t0
);
/* Write whichever value was fetched back to the general register. */
24380 gen_store_gpr(t0
, Rb
);
24386 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
24388 static void gen_mxu_s8ldd(DisasContext
*ctx
)
24391 uint32_t XRa
, Rb
, s8
, optn3
;
24393 t0
= tcg_temp_new();
24394 t1
= tcg_temp_new();
24396 XRa
= extract32(ctx
->opcode
, 6, 4);
24397 s8
= extract32(ctx
->opcode
, 10, 8);
24398 optn3
= extract32(ctx
->opcode
, 18, 3);
24399 Rb
= extract32(ctx
->opcode
, 21, 5);
24401 gen_load_gpr(t0
, Rb
);
24402 tcg_gen_addi_tl(t0
, t0
, (int8_t)s8
);
24405 /* XRa[7:0] = tmp8 */
24406 case MXU_OPTN3_PTN0
:
24407 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24408 gen_load_mxu_gpr(t0
, XRa
);
24409 tcg_gen_deposit_tl(t0
, t0
, t1
, 0, 8);
24411 /* XRa[15:8] = tmp8 */
24412 case MXU_OPTN3_PTN1
:
24413 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24414 gen_load_mxu_gpr(t0
, XRa
);
24415 tcg_gen_deposit_tl(t0
, t0
, t1
, 8, 8);
24417 /* XRa[23:16] = tmp8 */
24418 case MXU_OPTN3_PTN2
:
24419 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24420 gen_load_mxu_gpr(t0
, XRa
);
24421 tcg_gen_deposit_tl(t0
, t0
, t1
, 16, 8);
24423 /* XRa[31:24] = tmp8 */
24424 case MXU_OPTN3_PTN3
:
24425 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24426 gen_load_mxu_gpr(t0
, XRa
);
24427 tcg_gen_deposit_tl(t0
, t0
, t1
, 24, 8);
24429 /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
24430 case MXU_OPTN3_PTN4
:
24431 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24432 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24434 /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
24435 case MXU_OPTN3_PTN5
:
24436 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24437 tcg_gen_shli_tl(t1
, t1
, 8);
24438 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24440 /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
24441 case MXU_OPTN3_PTN6
:
24442 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
24443 tcg_gen_mov_tl(t0
, t1
);
24444 tcg_gen_andi_tl(t0
, t0
, 0xFF00FFFF);
24445 tcg_gen_shli_tl(t1
, t1
, 16);
24446 tcg_gen_or_tl(t0
, t0
, t1
);
24448 /* XRa = {tmp8, tmp8, tmp8, tmp8} */
24449 case MXU_OPTN3_PTN7
:
24450 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24451 tcg_gen_deposit_tl(t1
, t1
, t1
, 8, 8);
24452 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24456 gen_store_mxu_gpr(t0
, XRa
);
24463 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
24465 static void gen_mxu_d16mul(DisasContext
*ctx
)
24467 TCGv t0
, t1
, t2
, t3
;
24468 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
;
24470 t0
= tcg_temp_new();
24471 t1
= tcg_temp_new();
24472 t2
= tcg_temp_new();
24473 t3
= tcg_temp_new();
24475 XRa
= extract32(ctx
->opcode
, 6, 4);
24476 XRb
= extract32(ctx
->opcode
, 10, 4);
24477 XRc
= extract32(ctx
->opcode
, 14, 4);
24478 XRd
= extract32(ctx
->opcode
, 18, 4);
24479 optn2
= extract32(ctx
->opcode
, 22, 2);
24481 gen_load_mxu_gpr(t1
, XRb
);
24482 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24483 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24484 gen_load_mxu_gpr(t3
, XRc
);
24485 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24486 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24489 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24490 tcg_gen_mul_tl(t3
, t1
, t3
);
24491 tcg_gen_mul_tl(t2
, t0
, t2
);
24493 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24494 tcg_gen_mul_tl(t3
, t0
, t3
);
24495 tcg_gen_mul_tl(t2
, t0
, t2
);
24497 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24498 tcg_gen_mul_tl(t3
, t1
, t3
);
24499 tcg_gen_mul_tl(t2
, t1
, t2
);
24501 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24502 tcg_gen_mul_tl(t3
, t0
, t3
);
24503 tcg_gen_mul_tl(t2
, t1
, t2
);
24506 gen_store_mxu_gpr(t3
, XRa
);
24507 gen_store_mxu_gpr(t2
, XRd
);
24516 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
24519 static void gen_mxu_d16mac(DisasContext
*ctx
)
24521 TCGv t0
, t1
, t2
, t3
;
24522 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
, aptn2
;
24524 t0
= tcg_temp_new();
24525 t1
= tcg_temp_new();
24526 t2
= tcg_temp_new();
24527 t3
= tcg_temp_new();
24529 XRa
= extract32(ctx
->opcode
, 6, 4);
24530 XRb
= extract32(ctx
->opcode
, 10, 4);
24531 XRc
= extract32(ctx
->opcode
, 14, 4);
24532 XRd
= extract32(ctx
->opcode
, 18, 4);
24533 optn2
= extract32(ctx
->opcode
, 22, 2);
24534 aptn2
= extract32(ctx
->opcode
, 24, 2);
24536 gen_load_mxu_gpr(t1
, XRb
);
24537 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24538 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24540 gen_load_mxu_gpr(t3
, XRc
);
24541 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24542 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24545 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24546 tcg_gen_mul_tl(t3
, t1
, t3
);
24547 tcg_gen_mul_tl(t2
, t0
, t2
);
24549 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24550 tcg_gen_mul_tl(t3
, t0
, t3
);
24551 tcg_gen_mul_tl(t2
, t0
, t2
);
24553 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24554 tcg_gen_mul_tl(t3
, t1
, t3
);
24555 tcg_gen_mul_tl(t2
, t1
, t2
);
24557 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24558 tcg_gen_mul_tl(t3
, t0
, t3
);
24559 tcg_gen_mul_tl(t2
, t1
, t2
);
24562 gen_load_mxu_gpr(t0
, XRa
);
24563 gen_load_mxu_gpr(t1
, XRd
);
24567 tcg_gen_add_tl(t3
, t0
, t3
);
24568 tcg_gen_add_tl(t2
, t1
, t2
);
24571 tcg_gen_add_tl(t3
, t0
, t3
);
24572 tcg_gen_sub_tl(t2
, t1
, t2
);
24575 tcg_gen_sub_tl(t3
, t0
, t3
);
24576 tcg_gen_add_tl(t2
, t1
, t2
);
24579 tcg_gen_sub_tl(t3
, t0
, t3
);
24580 tcg_gen_sub_tl(t2
, t1
, t2
);
24583 gen_store_mxu_gpr(t3
, XRa
);
24584 gen_store_mxu_gpr(t2
, XRd
);
24593 * Q8MUL XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
24594 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
24596 static void gen_mxu_q8mul_q8mulsu(DisasContext
*ctx
)
24598 TCGv t0
, t1
, t2
, t3
, t4
, t5
, t6
, t7
;
24599 uint32_t XRa
, XRb
, XRc
, XRd
, sel
;
24601 t0
= tcg_temp_new();
24602 t1
= tcg_temp_new();
24603 t2
= tcg_temp_new();
24604 t3
= tcg_temp_new();
24605 t4
= tcg_temp_new();
24606 t5
= tcg_temp_new();
24607 t6
= tcg_temp_new();
24608 t7
= tcg_temp_new();
24610 XRa
= extract32(ctx
->opcode
, 6, 4);
24611 XRb
= extract32(ctx
->opcode
, 10, 4);
24612 XRc
= extract32(ctx
->opcode
, 14, 4);
24613 XRd
= extract32(ctx
->opcode
, 18, 4);
24614 sel
= extract32(ctx
->opcode
, 22, 2);
24616 gen_load_mxu_gpr(t3
, XRb
);
24617 gen_load_mxu_gpr(t7
, XRc
);
24621 tcg_gen_ext8s_tl(t0
, t3
);
24622 tcg_gen_shri_tl(t3
, t3
, 8);
24623 tcg_gen_ext8s_tl(t1
, t3
);
24624 tcg_gen_shri_tl(t3
, t3
, 8);
24625 tcg_gen_ext8s_tl(t2
, t3
);
24626 tcg_gen_shri_tl(t3
, t3
, 8);
24627 tcg_gen_ext8s_tl(t3
, t3
);
24630 tcg_gen_ext8u_tl(t0
, t3
);
24631 tcg_gen_shri_tl(t3
, t3
, 8);
24632 tcg_gen_ext8u_tl(t1
, t3
);
24633 tcg_gen_shri_tl(t3
, t3
, 8);
24634 tcg_gen_ext8u_tl(t2
, t3
);
24635 tcg_gen_shri_tl(t3
, t3
, 8);
24636 tcg_gen_ext8u_tl(t3
, t3
);
24639 tcg_gen_ext8u_tl(t4
, t7
);
24640 tcg_gen_shri_tl(t7
, t7
, 8);
24641 tcg_gen_ext8u_tl(t5
, t7
);
24642 tcg_gen_shri_tl(t7
, t7
, 8);
24643 tcg_gen_ext8u_tl(t6
, t7
);
24644 tcg_gen_shri_tl(t7
, t7
, 8);
24645 tcg_gen_ext8u_tl(t7
, t7
);
24647 tcg_gen_mul_tl(t0
, t0
, t4
);
24648 tcg_gen_mul_tl(t1
, t1
, t5
);
24649 tcg_gen_mul_tl(t2
, t2
, t6
);
24650 tcg_gen_mul_tl(t3
, t3
, t7
);
24652 tcg_gen_andi_tl(t0
, t0
, 0xFFFF);
24653 tcg_gen_andi_tl(t1
, t1
, 0xFFFF);
24654 tcg_gen_andi_tl(t2
, t2
, 0xFFFF);
24655 tcg_gen_andi_tl(t3
, t3
, 0xFFFF);
24657 tcg_gen_shli_tl(t1
, t1
, 16);
24658 tcg_gen_shli_tl(t3
, t3
, 16);
24660 tcg_gen_or_tl(t0
, t0
, t1
);
24661 tcg_gen_or_tl(t1
, t2
, t3
);
24663 gen_store_mxu_gpr(t0
, XRd
);
24664 gen_store_mxu_gpr(t1
, XRa
);
24677 * S32LDD XRa, Rb, S12 - Load a word from memory to XRF
24678 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
/*
 * S32LDD / S32LDDR XRa, Rb, S12: load a 32-bit word from memory at
 * Rb + (sign-extended S12 << 2) into MXU register XRa.  sel (bit 20)
 * distinguishes S32LDDR, which byte-swaps the loaded word.
 */
24680 static void gen_mxu_s32ldd_s32lddr(DisasContext
*ctx
)
24683 uint32_t XRa
, Rb
, s12
, sel
;
24685 t0
= tcg_temp_new();
24686 t1
= tcg_temp_new();
/* Decode fields: XRa [9:6], s12 [19:10], sel [20], Rb [25:21]. */
24688 XRa
= extract32(ctx
->opcode
, 6, 4);
24689 s12
= extract32(ctx
->opcode
, 10, 10);
24690 sel
= extract32(ctx
->opcode
, 20, 1);
24691 Rb
= extract32(ctx
->opcode
, 21, 5);
24693 gen_load_gpr(t0
, Rb
);
/* Scale the 10-bit offset to a byte offset (words -> bytes). */
24695 tcg_gen_movi_tl(t1
, s12
);
24696 tcg_gen_shli_tl(t1
, t1
, 2);
/*
 * Sign-extend the scaled offset to 32 bits.  NOTE(review): this OR is
 * presumably guarded by a negative-offset test (e.g. "if (s12 & 0x200)")
 * that was lost in this extraction — confirm against the full source.
 */
24698 tcg_gen_ori_tl(t1
, t1
, 0xFFFFF000);
24700 tcg_gen_add_tl(t1
, t0
, t1
);
24701 tcg_gen_qemu_ld_tl(t1
, t1
, ctx
->mem_idx
, MO_SL
);
/*
 * Reverse byte order for S32LDDR.  NOTE(review): presumably inside an
 * "if (sel == 1)" branch lost in this extraction — confirm.
 */
24705 tcg_gen_bswap32_tl(t1
, t1
);
24707 gen_store_mxu_gpr(t1
, XRa
);
24715 * MXU instruction category: logic
24716 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24718 * S32NOR S32AND S32OR S32XOR
24722 * S32NOR XRa, XRb, XRc
24723 * Update XRa with the result of logical bitwise 'nor' operation
24724 * applied to the content of XRb and XRc.
24726 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24727 * +-----------+---------+-----+-------+-------+-------+-----------+
24728 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24729 * +-----------+---------+-----+-------+-------+-------+-----------+
/*
 * Emit TCG for S32NOR: XRa = ~(XRb | XRc), with fast paths for zero
 * and duplicated operand registers.  XR0 is a hard-wired zero source
 * and a discard destination, so mxu_gpr[] is indexed with XRn - 1.
 */
24731 static void gen_mxu_S32NOR(DisasContext
*ctx
)
24733 uint32_t pad
, XRc
, XRb
, XRa
;
24735 pad
= extract32(ctx
->opcode
, 21, 5);
24736 XRc
= extract32(ctx
->opcode
, 14, 4);
24737 XRb
= extract32(ctx
->opcode
, 10, 4);
24738 XRa
= extract32(ctx
->opcode
, 6, 4);
24740 if (unlikely(pad
!= 0)) {
24741 /* opcode padding incorrect -> do nothing */
24742 } else if (unlikely(XRa
== 0)) {
24743 /* destination is zero register -> do nothing */
24744 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24745 /* both operands zero registers -> just set destination to all 1s */
24746 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0xFFFFFFFF);
24747 } else if (unlikely(XRb
== 0)) {
24748 /* XRb zero register -> just set destination to the negation of XRc */
24749 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24750 } else if (unlikely(XRc
== 0)) {
24751 /* XRc zero register -> just set destination to the negation of XRb */
24752 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24753 } else if (unlikely(XRb
== XRc
)) {
24754 /* both operands same -> just set destination to the negation of XRb */
24755 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24757 /* the most general case */
24758 tcg_gen_nor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24763 * S32AND XRa, XRb, XRc
24764 * Update XRa with the result of logical bitwise 'and' operation
24765 * applied to the content of XRb and XRc.
24767 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24768 * +-----------+---------+-----+-------+-------+-------+-----------+
24769 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24770 * +-----------+---------+-----+-------+-------+-------+-----------+
/*
 * Emit TCG for S32AND: XRa = XRb & XRc, with fast paths for zero and
 * duplicated operand registers.  XR0 reads as zero and writes to it
 * are dropped, hence the XRn - 1 indexing into mxu_gpr[].
 */
24772 static void gen_mxu_S32AND(DisasContext
*ctx
)
24774 uint32_t pad
, XRc
, XRb
, XRa
;
24776 pad
= extract32(ctx
->opcode
, 21, 5);
24777 XRc
= extract32(ctx
->opcode
, 14, 4);
24778 XRb
= extract32(ctx
->opcode
, 10, 4);
24779 XRa
= extract32(ctx
->opcode
, 6, 4);
24781 if (unlikely(pad
!= 0)) {
24782 /* opcode padding incorrect -> do nothing */
24783 } else if (unlikely(XRa
== 0)) {
24784 /* destination is zero register -> do nothing */
24785 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
24786 /* one of operands zero register -> just set destination to all 0s */
24787 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24788 } else if (unlikely(XRb
== XRc
)) {
24789 /* both operands same -> just set destination to one of them */
24790 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24792 /* the most general case */
24793 tcg_gen_and_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24798 * S32OR XRa, XRb, XRc
24799 * Update XRa with the result of logical bitwise 'or' operation
24800 * applied to the content of XRb and XRc.
24802 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24803 * +-----------+---------+-----+-------+-------+-------+-----------+
24804 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24805 * +-----------+---------+-----+-------+-------+-------+-----------+
/*
 * Emit TCG for S32OR: XRa = XRb | XRc, with fast paths for zero and
 * duplicated operand registers.  XR0 reads as zero and writes to it
 * are dropped, hence the XRn - 1 indexing into mxu_gpr[].
 */
24807 static void gen_mxu_S32OR(DisasContext
*ctx
)
24809 uint32_t pad
, XRc
, XRb
, XRa
;
24811 pad
= extract32(ctx
->opcode
, 21, 5);
24812 XRc
= extract32(ctx
->opcode
, 14, 4);
24813 XRb
= extract32(ctx
->opcode
, 10, 4);
24814 XRa
= extract32(ctx
->opcode
, 6, 4);
24816 if (unlikely(pad
!= 0)) {
24817 /* opcode padding incorrect -> do nothing */
24818 } else if (unlikely(XRa
== 0)) {
24819 /* destination is zero register -> do nothing */
24820 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24821 /* both operands zero registers -> just set destination to all 0s */
24822 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24823 } else if (unlikely(XRb
== 0)) {
24824 /* XRb zero register -> just set destination to the content of XRc */
24825 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24826 } else if (unlikely(XRc
== 0)) {
24827 /* XRc zero register -> just set destination to the content of XRb */
24828 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24829 } else if (unlikely(XRb
== XRc
)) {
24830 /* both operands same -> just set destination to one of them */
24831 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24833 /* the most general case */
24834 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24839 * S32XOR XRa, XRb, XRc
24840 * Update XRa with the result of logical bitwise 'xor' operation
24841 * applied to the content of XRb and XRc.
24843 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24844 * +-----------+---------+-----+-------+-------+-------+-----------+
24845 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24846 * +-----------+---------+-----+-------+-------+-------+-----------+
24848 static void gen_mxu_S32XOR(DisasContext
*ctx
)
24850 uint32_t pad
, XRc
, XRb
, XRa
;
24852 pad
= extract32(ctx
->opcode
, 21, 5);
24853 XRc
= extract32(ctx
->opcode
, 14, 4);
24854 XRb
= extract32(ctx
->opcode
, 10, 4);
24855 XRa
= extract32(ctx
->opcode
, 6, 4);
24857 if (unlikely(pad
!= 0)) {
24858 /* opcode padding incorrect -> do nothing */
24859 } else if (unlikely(XRa
== 0)) {
24860 /* destination is zero register -> do nothing */
24861 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24862 /* both operands zero registers -> just set destination to all 0s */
24863 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24864 } else if (unlikely(XRb
== 0)) {
24865 /* XRb zero register -> just set destination to the content of XRc */
24866 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
24867 } else if (unlikely(XRc
== 0)) {
24868 /* XRc zero register -> just set destination to the content of XRb */
24869 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24870 } else if (unlikely(XRb
== XRc
)) {
24871 /* both operands same -> just set destination to all 0s */
24872 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24874 /* the most general case */
24875 tcg_gen_xor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
24881 * MXU instruction category max/min
24882 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24884 * S32MAX D16MAX Q8MAX
24885 * S32MIN D16MIN Q8MIN
24889 * S32MAX XRa, XRb, XRc
24890 * Update XRa with the maximum of signed 32-bit integers contained
24893 * S32MIN XRa, XRb, XRc
24894 * Update XRa with the minimum of signed 32-bit integers contained
24897 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24898 * +-----------+---------+-----+-------+-------+-------+-----------+
24899 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
24900 * +-----------+---------+-----+-------+-------+-------+-----------+
24902 static void gen_mxu_S32MAX_S32MIN(DisasContext
*ctx
)
24904 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
24906 pad
= extract32(ctx
->opcode
, 21, 5);
24907 opc
= extract32(ctx
->opcode
, 18, 3);
24908 XRc
= extract32(ctx
->opcode
, 14, 4);
24909 XRb
= extract32(ctx
->opcode
, 10, 4);
24910 XRa
= extract32(ctx
->opcode
, 6, 4);
24912 if (unlikely(pad
!= 0)) {
24913 /* opcode padding incorrect -> do nothing */
24914 } else if (unlikely(XRa
== 0)) {
24915 /* destination is zero register -> do nothing */
24916 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
24917 /* both operands zero registers -> just set destination to zero */
24918 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
24919 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
24920 /* exactly one operand is zero register - find which one is not...*/
24921 uint32_t XRx
= XRb
? XRb
: XRc
;
24922 /* ...and do max/min operation with one operand 0 */
24923 if (opc
== OPC_MXU_S32MAX
) {
24924 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
24926 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
24928 } else if (unlikely(XRb
== XRc
)) {
24929 /* both operands same -> just set destination to one of them */
24930 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
24932 /* the most general case */
24933 if (opc
== OPC_MXU_S32MAX
) {
24934 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
24937 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
24945 * Update XRa with the 16-bit-wise maximums of signed integers
24946 * contained in XRb and XRc.
24949 * Update XRa with the 16-bit-wise minimums of signed integers
24950 * contained in XRb and XRc.
24952 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24953 * +-----------+---------+-----+-------+-------+-------+-----------+
24954 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
24955 * +-----------+---------+-----+-------+-------+-------+-----------+
24957 static void gen_mxu_D16MAX_D16MIN(DisasContext
*ctx
)
24959 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
24961 pad
= extract32(ctx
->opcode
, 21, 5);
24962 opc
= extract32(ctx
->opcode
, 18, 3);
24963 XRc
= extract32(ctx
->opcode
, 14, 4);
24964 XRb
= extract32(ctx
->opcode
, 10, 4);
24965 XRa
= extract32(ctx
->opcode
, 6, 4);
24967 if (unlikely(pad
!= 0)) {
24968 /* opcode padding incorrect -> do nothing */
24969 } else if (unlikely(XRc
== 0)) {
24970 /* destination is zero register -> do nothing */
24971 } else if (unlikely((XRb
== 0) && (XRa
== 0))) {
24972 /* both operands zero registers -> just set destination to zero */
24973 tcg_gen_movi_i32(mxu_gpr
[XRc
- 1], 0);
24974 } else if (unlikely((XRb
== 0) || (XRa
== 0))) {
24975 /* exactly one operand is zero register - find which one is not...*/
24976 uint32_t XRx
= XRb
? XRb
: XRc
;
24977 /* ...and do half-word-wise max/min with one operand 0 */
24978 TCGv_i32 t0
= tcg_temp_new();
24979 TCGv_i32 t1
= tcg_const_i32(0);
24981 /* the left half-word first */
24982 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFFFF0000);
24983 if (opc
== OPC_MXU_D16MAX
) {
24984 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
24986 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
24989 /* the right half-word */
24990 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0x0000FFFF);
24991 /* move half-words to the leftmost position */
24992 tcg_gen_shli_i32(t0
, t0
, 16);
24993 /* t0 will be max/min of t0 and t1 */
24994 if (opc
== OPC_MXU_D16MAX
) {
24995 tcg_gen_smax_i32(t0
, t0
, t1
);
24997 tcg_gen_smin_i32(t0
, t0
, t1
);
24999 /* return resulting half-words to its original position */
25000 tcg_gen_shri_i32(t0
, t0
, 16);
25001 /* finaly update the destination */
25002 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25006 } else if (unlikely(XRb
== XRc
)) {
25007 /* both operands same -> just set destination to one of them */
25008 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25010 /* the most general case */
25011 TCGv_i32 t0
= tcg_temp_new();
25012 TCGv_i32 t1
= tcg_temp_new();
25014 /* the left half-word first */
25015 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFFFF0000);
25016 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25017 if (opc
== OPC_MXU_D16MAX
) {
25018 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25020 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25023 /* the right half-word */
25024 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25025 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0x0000FFFF);
25026 /* move half-words to the leftmost position */
25027 tcg_gen_shli_i32(t0
, t0
, 16);
25028 tcg_gen_shli_i32(t1
, t1
, 16);
25029 /* t0 will be max/min of t0 and t1 */
25030 if (opc
== OPC_MXU_D16MAX
) {
25031 tcg_gen_smax_i32(t0
, t0
, t1
);
25033 tcg_gen_smin_i32(t0
, t0
, t1
);
25035 /* return resulting half-words to its original position */
25036 tcg_gen_shri_i32(t0
, t0
, 16);
25037 /* finaly update the destination */
25038 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
/*
 *  Q8MAX XRa, XRb, XRc
 *    Update XRa with the 8-bit-wise maximums of signed integers
 *    contained in XRb and XRc.
 *
 *  Q8MIN XRa, XRb, XRc
 *    Update XRa with the 8-bit-wise minimums of signed integers
 *    contained in XRb and XRc.
 *
 *  1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 *  |  SPECIAL2 |0 0 0 0 0| opc |  XRc  |  XRb  |  XRa  |MXU__POOL00|
 *  +-----------+---------+-----+-------+-------+-------+-----------+
 */
static void gen_mxu_Q8MAX_Q8MIN(DisasContext *ctx)
{
    uint32_t pad, opc, XRc, XRb, XRa;

    pad = extract32(ctx->opcode, 21, 5);
    opc = extract32(ctx->opcode, 18, 3);   /* selects Q8MAX vs. Q8MIN */
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode,  6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to zero */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* exactly one operand is zero register - make it be the first...*/
        uint32_t XRx = XRb ? XRb : XRc;
        /* ...and do byte-wise max/min with one operand 0 */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_const_i32(0);
        int32_t i;

        /*
         * The comparison is done byte by byte: each byte is masked out,
         * shifted to the top of the word (so that signed comparison
         * against zero works on the byte's own sign bit), compared,
         * shifted back, and OR-ed into the destination.
         *
         * NOTE(review): if XRa aliases the nonzero operand, the first
         * store into XRa clobbers the source bytes read by later loop
         * iterations — confirm against hardware behavior.
         */
        /* the leftmost byte (byte 3) first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFF000000);
        if (opc == OPC_MXU_Q8MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);
        }

        /* bytes 2, 1, 0 */
        for (i = 2; i >= 0; i--) {
            /* extract the byte */
            tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFF << (8 * i));
            /* move the byte to the leftmost position */
            tcg_gen_shli_i32(t0, t0, 8 * (3 - i));
            /* t0 will be max/min of t0 and t1 */
            if (opc == OPC_MXU_Q8MAX) {
                tcg_gen_smax_i32(t0, t0, t1);
            } else {
                tcg_gen_smin_i32(t0, t0, t1);
            }
            /* return resulting byte to its original position */
            tcg_gen_shri_i32(t0, t0, 8 * (3 - i));
            /* finally update the destination */
            tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
        }

        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else {
        /* the most general case */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_temp_new();
        int32_t i;

        /* the leftmost bytes (bytes 3) first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFF000000);
        tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF000000);
        if (opc == OPC_MXU_Q8MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);
        }

        /* bytes 2, 1, 0 */
        for (i = 2; i >= 0; i--) {
            /* extract corresponding bytes */
            tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFF << (8 * i));
            tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF << (8 * i));
            /* move the bytes to the leftmost position */
            tcg_gen_shli_i32(t0, t0, 8 * (3 - i));
            tcg_gen_shli_i32(t1, t1, 8 * (3 - i));
            /* t0 will be max/min of t0 and t1 */
            if (opc == OPC_MXU_Q8MAX) {
                tcg_gen_smax_i32(t0, t0, t1);
            } else {
                tcg_gen_smin_i32(t0, t0, t1);
            }
            /* return resulting byte to its original position */
            tcg_gen_shri_i32(t0, t0, 8 * (3 - i));
            /* finally update the destination */
            tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
        }

        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t0);
    }
}
25157 * MXU instruction category: align
25158 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25164 * S32ALNI XRc, XRb, XRa, optn3
25165 * Arrange bytes from XRb and XRc according to one of five sets of
25166 * rules determined by optn3, and place the result in XRa.
25168 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25169 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25170 * | SPECIAL2 |optn3|0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25171 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25174 static void gen_mxu_S32ALNI(DisasContext
*ctx
)
25176 uint32_t optn3
, pad
, XRc
, XRb
, XRa
;
25178 optn3
= extract32(ctx
->opcode
, 23, 3);
25179 pad
= extract32(ctx
->opcode
, 21, 2);
25180 XRc
= extract32(ctx
->opcode
, 14, 4);
25181 XRb
= extract32(ctx
->opcode
, 10, 4);
25182 XRa
= extract32(ctx
->opcode
, 6, 4);
25184 if (unlikely(pad
!= 0)) {
25185 /* opcode padding incorrect -> do nothing */
25186 } else if (unlikely(XRa
== 0)) {
25187 /* destination is zero register -> do nothing */
25188 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25189 /* both operands zero registers -> just set destination to all 0s */
25190 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25191 } else if (unlikely(XRb
== 0)) {
25192 /* XRb zero register -> just appropriatelly shift XRc into XRa */
25194 case MXU_OPTN3_PTN0
:
25195 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25197 case MXU_OPTN3_PTN1
:
25198 case MXU_OPTN3_PTN2
:
25199 case MXU_OPTN3_PTN3
:
25200 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1],
25203 case MXU_OPTN3_PTN4
:
25204 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25207 } else if (unlikely(XRc
== 0)) {
25208 /* XRc zero register -> just appropriatelly shift XRb into XRa */
25210 case MXU_OPTN3_PTN0
:
25211 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25213 case MXU_OPTN3_PTN1
:
25214 case MXU_OPTN3_PTN2
:
25215 case MXU_OPTN3_PTN3
:
25216 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25218 case MXU_OPTN3_PTN4
:
25219 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25222 } else if (unlikely(XRb
== XRc
)) {
25223 /* both operands same -> just rotation or moving from any of them */
25225 case MXU_OPTN3_PTN0
:
25226 case MXU_OPTN3_PTN4
:
25227 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25229 case MXU_OPTN3_PTN1
:
25230 case MXU_OPTN3_PTN2
:
25231 case MXU_OPTN3_PTN3
:
25232 tcg_gen_rotli_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25236 /* the most general case */
25238 case MXU_OPTN3_PTN0
:
25242 /* +---------------+ */
25243 /* | A B C D | E F G H */
25244 /* +-------+-------+ */
25249 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25252 case MXU_OPTN3_PTN1
:
25256 /* +-------------------+ */
25257 /* A | B C D E | F G H */
25258 /* +---------+---------+ */
25263 TCGv_i32 t0
= tcg_temp_new();
25264 TCGv_i32 t1
= tcg_temp_new();
25266 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x00FFFFFF);
25267 tcg_gen_shli_i32(t0
, t0
, 8);
25269 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25270 tcg_gen_shri_i32(t1
, t1
, 24);
25272 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25278 case MXU_OPTN3_PTN2
:
25282 /* +-------------------+ */
25283 /* A B | C D E F | G H */
25284 /* +---------+---------+ */
25289 TCGv_i32 t0
= tcg_temp_new();
25290 TCGv_i32 t1
= tcg_temp_new();
25292 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25293 tcg_gen_shli_i32(t0
, t0
, 16);
25295 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25296 tcg_gen_shri_i32(t1
, t1
, 16);
25298 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25304 case MXU_OPTN3_PTN3
:
25308 /* +-------------------+ */
25309 /* A B C | D E F G | H */
25310 /* +---------+---------+ */
25315 TCGv_i32 t0
= tcg_temp_new();
25316 TCGv_i32 t1
= tcg_temp_new();
25318 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x000000FF);
25319 tcg_gen_shli_i32(t0
, t0
, 24);
25321 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFFFF00);
25322 tcg_gen_shri_i32(t1
, t1
, 8);
25324 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25330 case MXU_OPTN3_PTN4
:
25334 /* +---------------+ */
25335 /* A B C D | E F G H | */
25336 /* +-------+-------+ */
25341 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25350 * Decoding engine for MXU
25351 * =======================
25356 * Decode MXU pool00
25358 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25359 * +-----------+---------+-----+-------+-------+-------+-----------+
25360 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL00|
25361 * +-----------+---------+-----+-------+-------+-------+-----------+
25364 static void decode_opc_mxu__pool00(CPUMIPSState
*env
, DisasContext
*ctx
)
25366 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25369 case OPC_MXU_S32MAX
:
25370 case OPC_MXU_S32MIN
:
25371 gen_mxu_S32MAX_S32MIN(ctx
);
25373 case OPC_MXU_D16MAX
:
25374 case OPC_MXU_D16MIN
:
25375 gen_mxu_D16MAX_D16MIN(ctx
);
25377 case OPC_MXU_Q8MAX
:
25378 case OPC_MXU_Q8MIN
:
25379 gen_mxu_Q8MAX_Q8MIN(ctx
);
25381 case OPC_MXU_Q8SLT
:
25382 /* TODO: Implement emulation of Q8SLT instruction. */
25383 MIPS_INVAL("OPC_MXU_Q8SLT");
25384 generate_exception_end(ctx
, EXCP_RI
);
25386 case OPC_MXU_Q8SLTU
:
25387 /* TODO: Implement emulation of Q8SLTU instruction. */
25388 MIPS_INVAL("OPC_MXU_Q8SLTU");
25389 generate_exception_end(ctx
, EXCP_RI
);
25392 MIPS_INVAL("decode_opc_mxu");
25393 generate_exception_end(ctx
, EXCP_RI
);
25400 * Decode MXU pool01
25402 * S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
25403 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25404 * +-----------+---------+-----+-------+-------+-------+-----------+
25405 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25406 * +-----------+---------+-----+-------+-------+-------+-----------+
25409 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25410 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25411 * | SPECIAL2 |en2|0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25412 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25415 static void decode_opc_mxu__pool01(CPUMIPSState
*env
, DisasContext
*ctx
)
25417 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25420 case OPC_MXU_S32SLT
:
25421 /* TODO: Implement emulation of S32SLT instruction. */
25422 MIPS_INVAL("OPC_MXU_S32SLT");
25423 generate_exception_end(ctx
, EXCP_RI
);
25425 case OPC_MXU_D16SLT
:
25426 /* TODO: Implement emulation of D16SLT instruction. */
25427 MIPS_INVAL("OPC_MXU_D16SLT");
25428 generate_exception_end(ctx
, EXCP_RI
);
25430 case OPC_MXU_D16AVG
:
25431 /* TODO: Implement emulation of D16AVG instruction. */
25432 MIPS_INVAL("OPC_MXU_D16AVG");
25433 generate_exception_end(ctx
, EXCP_RI
);
25435 case OPC_MXU_D16AVGR
:
25436 /* TODO: Implement emulation of D16AVGR instruction. */
25437 MIPS_INVAL("OPC_MXU_D16AVGR");
25438 generate_exception_end(ctx
, EXCP_RI
);
25440 case OPC_MXU_Q8AVG
:
25441 /* TODO: Implement emulation of Q8AVG instruction. */
25442 MIPS_INVAL("OPC_MXU_Q8AVG");
25443 generate_exception_end(ctx
, EXCP_RI
);
25445 case OPC_MXU_Q8AVGR
:
25446 /* TODO: Implement emulation of Q8AVGR instruction. */
25447 MIPS_INVAL("OPC_MXU_Q8AVGR");
25448 generate_exception_end(ctx
, EXCP_RI
);
25450 case OPC_MXU_Q8ADD
:
25451 /* TODO: Implement emulation of Q8ADD instruction. */
25452 MIPS_INVAL("OPC_MXU_Q8ADD");
25453 generate_exception_end(ctx
, EXCP_RI
);
25456 MIPS_INVAL("decode_opc_mxu");
25457 generate_exception_end(ctx
, EXCP_RI
);
25464 * Decode MXU pool02
25466 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25467 * +-----------+---------+-----+-------+-------+-------+-----------+
25468 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL02|
25469 * +-----------+---------+-----+-------+-------+-------+-----------+
25472 static void decode_opc_mxu__pool02(CPUMIPSState
*env
, DisasContext
*ctx
)
25474 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25477 case OPC_MXU_S32CPS
:
25478 /* TODO: Implement emulation of S32CPS instruction. */
25479 MIPS_INVAL("OPC_MXU_S32CPS");
25480 generate_exception_end(ctx
, EXCP_RI
);
25482 case OPC_MXU_D16CPS
:
25483 /* TODO: Implement emulation of D16CPS instruction. */
25484 MIPS_INVAL("OPC_MXU_D16CPS");
25485 generate_exception_end(ctx
, EXCP_RI
);
25487 case OPC_MXU_Q8ABD
:
25488 /* TODO: Implement emulation of Q8ABD instruction. */
25489 MIPS_INVAL("OPC_MXU_Q8ABD");
25490 generate_exception_end(ctx
, EXCP_RI
);
25492 case OPC_MXU_Q16SAT
:
25493 /* TODO: Implement emulation of Q16SAT instruction. */
25494 MIPS_INVAL("OPC_MXU_Q16SAT");
25495 generate_exception_end(ctx
, EXCP_RI
);
25498 MIPS_INVAL("decode_opc_mxu");
25499 generate_exception_end(ctx
, EXCP_RI
);
25506 * Decode MXU pool03
25509 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25510 * +-----------+---+---+-------+-------+-------+-------+-----------+
25511 * | SPECIAL2 |x x|on2|0 0 0 0| XRc | XRb | XRa |MXU__POOL03|
25512 * +-----------+---+---+-------+-------+-------+-------+-----------+
25515 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25516 * +-----------+---+---+-------+-------+-------+-------+-----------+
25517 * | SPECIAL2 |x x|on2| Xd | XRc | XRb | XRa |MXU__POOL03|
25518 * +-----------+---+---+-------+-------+-------+-------+-----------+
25521 static void decode_opc_mxu__pool03(CPUMIPSState
*env
, DisasContext
*ctx
)
25523 uint32_t opcode
= extract32(ctx
->opcode
, 24, 2);
25526 case OPC_MXU_D16MULF
:
25527 /* TODO: Implement emulation of D16MULF instruction. */
25528 MIPS_INVAL("OPC_MXU_D16MULF");
25529 generate_exception_end(ctx
, EXCP_RI
);
25531 case OPC_MXU_D16MULE
:
25532 /* TODO: Implement emulation of D16MULE instruction. */
25533 MIPS_INVAL("OPC_MXU_D16MULE");
25534 generate_exception_end(ctx
, EXCP_RI
);
25537 MIPS_INVAL("decode_opc_mxu");
25538 generate_exception_end(ctx
, EXCP_RI
);
25545 * Decode MXU pool04
25547 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25548 * +-----------+---------+-+-------------------+-------+-----------+
25549 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL04|
25550 * +-----------+---------+-+-------------------+-------+-----------+
25553 static void decode_opc_mxu__pool04(CPUMIPSState
*env
, DisasContext
*ctx
)
25555 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25558 case OPC_MXU_S32LDD
:
25559 case OPC_MXU_S32LDDR
:
25560 gen_mxu_s32ldd_s32lddr(ctx
);
25563 MIPS_INVAL("decode_opc_mxu");
25564 generate_exception_end(ctx
, EXCP_RI
);
25571 * Decode MXU pool05
25573 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25574 * +-----------+---------+-+-------------------+-------+-----------+
25575 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL05|
25576 * +-----------+---------+-+-------------------+-------+-----------+
25579 static void decode_opc_mxu__pool05(CPUMIPSState
*env
, DisasContext
*ctx
)
25581 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25584 case OPC_MXU_S32STD
:
25585 /* TODO: Implement emulation of S32STD instruction. */
25586 MIPS_INVAL("OPC_MXU_S32STD");
25587 generate_exception_end(ctx
, EXCP_RI
);
25589 case OPC_MXU_S32STDR
:
25590 /* TODO: Implement emulation of S32STDR instruction. */
25591 MIPS_INVAL("OPC_MXU_S32STDR");
25592 generate_exception_end(ctx
, EXCP_RI
);
25595 MIPS_INVAL("decode_opc_mxu");
25596 generate_exception_end(ctx
, EXCP_RI
);
25603 * Decode MXU pool06
25605 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25606 * +-----------+---------+---------+---+-------+-------+-----------+
25607 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL06|
25608 * +-----------+---------+---------+---+-------+-------+-----------+
25611 static void decode_opc_mxu__pool06(CPUMIPSState
*env
, DisasContext
*ctx
)
25613 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25616 case OPC_MXU_S32LDDV
:
25617 /* TODO: Implement emulation of S32LDDV instruction. */
25618 MIPS_INVAL("OPC_MXU_S32LDDV");
25619 generate_exception_end(ctx
, EXCP_RI
);
25621 case OPC_MXU_S32LDDVR
:
25622 /* TODO: Implement emulation of S32LDDVR instruction. */
25623 MIPS_INVAL("OPC_MXU_S32LDDVR");
25624 generate_exception_end(ctx
, EXCP_RI
);
25627 MIPS_INVAL("decode_opc_mxu");
25628 generate_exception_end(ctx
, EXCP_RI
);
25635 * Decode MXU pool07
25637 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25638 * +-----------+---------+---------+---+-------+-------+-----------+
25639 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL07|
25640 * +-----------+---------+---------+---+-------+-------+-----------+
25643 static void decode_opc_mxu__pool07(CPUMIPSState
*env
, DisasContext
*ctx
)
25645 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25648 case OPC_MXU_S32STDV
:
25649 /* TODO: Implement emulation of S32TDV instruction. */
25650 MIPS_INVAL("OPC_MXU_S32TDV");
25651 generate_exception_end(ctx
, EXCP_RI
);
25653 case OPC_MXU_S32STDVR
:
25654 /* TODO: Implement emulation of S32TDVR instruction. */
25655 MIPS_INVAL("OPC_MXU_S32TDVR");
25656 generate_exception_end(ctx
, EXCP_RI
);
25659 MIPS_INVAL("decode_opc_mxu");
25660 generate_exception_end(ctx
, EXCP_RI
);
25667 * Decode MXU pool08
25669 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25670 * +-----------+---------+-+-------------------+-------+-----------+
25671 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL08|
25672 * +-----------+---------+-+-------------------+-------+-----------+
25675 static void decode_opc_mxu__pool08(CPUMIPSState
*env
, DisasContext
*ctx
)
25677 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25680 case OPC_MXU_S32LDI
:
25681 /* TODO: Implement emulation of S32LDI instruction. */
25682 MIPS_INVAL("OPC_MXU_S32LDI");
25683 generate_exception_end(ctx
, EXCP_RI
);
25685 case OPC_MXU_S32LDIR
:
25686 /* TODO: Implement emulation of S32LDIR instruction. */
25687 MIPS_INVAL("OPC_MXU_S32LDIR");
25688 generate_exception_end(ctx
, EXCP_RI
);
25691 MIPS_INVAL("decode_opc_mxu");
25692 generate_exception_end(ctx
, EXCP_RI
);
25699 * Decode MXU pool09
25701 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25702 * +-----------+---------+-+-------------------+-------+-----------+
25703 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL09|
25704 * +-----------+---------+-+-------------------+-------+-----------+
25707 static void decode_opc_mxu__pool09(CPUMIPSState
*env
, DisasContext
*ctx
)
25709 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25712 case OPC_MXU_S32SDI
:
25713 /* TODO: Implement emulation of S32SDI instruction. */
25714 MIPS_INVAL("OPC_MXU_S32SDI");
25715 generate_exception_end(ctx
, EXCP_RI
);
25717 case OPC_MXU_S32SDIR
:
25718 /* TODO: Implement emulation of S32SDIR instruction. */
25719 MIPS_INVAL("OPC_MXU_S32SDIR");
25720 generate_exception_end(ctx
, EXCP_RI
);
25723 MIPS_INVAL("decode_opc_mxu");
25724 generate_exception_end(ctx
, EXCP_RI
);
25731 * Decode MXU pool10
25733 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25734 * +-----------+---------+---------+---+-------+-------+-----------+
25735 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL10|
25736 * +-----------+---------+---------+---+-------+-------+-----------+
25739 static void decode_opc_mxu__pool10(CPUMIPSState
*env
, DisasContext
*ctx
)
25741 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25744 case OPC_MXU_S32LDIV
:
25745 /* TODO: Implement emulation of S32LDIV instruction. */
25746 MIPS_INVAL("OPC_MXU_S32LDIV");
25747 generate_exception_end(ctx
, EXCP_RI
);
25749 case OPC_MXU_S32LDIVR
:
25750 /* TODO: Implement emulation of S32LDIVR instruction. */
25751 MIPS_INVAL("OPC_MXU_S32LDIVR");
25752 generate_exception_end(ctx
, EXCP_RI
);
25755 MIPS_INVAL("decode_opc_mxu");
25756 generate_exception_end(ctx
, EXCP_RI
);
25763 * Decode MXU pool11
25765 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25766 * +-----------+---------+---------+---+-------+-------+-----------+
25767 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL11|
25768 * +-----------+---------+---------+---+-------+-------+-----------+
25771 static void decode_opc_mxu__pool11(CPUMIPSState
*env
, DisasContext
*ctx
)
25773 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25776 case OPC_MXU_S32SDIV
:
25777 /* TODO: Implement emulation of S32SDIV instruction. */
25778 MIPS_INVAL("OPC_MXU_S32SDIV");
25779 generate_exception_end(ctx
, EXCP_RI
);
25781 case OPC_MXU_S32SDIVR
:
25782 /* TODO: Implement emulation of S32SDIVR instruction. */
25783 MIPS_INVAL("OPC_MXU_S32SDIVR");
25784 generate_exception_end(ctx
, EXCP_RI
);
25787 MIPS_INVAL("decode_opc_mxu");
25788 generate_exception_end(ctx
, EXCP_RI
);
25795 * Decode MXU pool12
25797 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25798 * +-----------+---+---+-------+-------+-------+-------+-----------+
25799 * | SPECIAL2 |an2|x x| Xd | XRc | XRb | XRa |MXU__POOL12|
25800 * +-----------+---+---+-------+-------+-------+-------+-----------+
25803 static void decode_opc_mxu__pool12(CPUMIPSState
*env
, DisasContext
*ctx
)
25805 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25808 case OPC_MXU_D32ACC
:
25809 /* TODO: Implement emulation of D32ACC instruction. */
25810 MIPS_INVAL("OPC_MXU_D32ACC");
25811 generate_exception_end(ctx
, EXCP_RI
);
25813 case OPC_MXU_D32ACCM
:
25814 /* TODO: Implement emulation of D32ACCM instruction. */
25815 MIPS_INVAL("OPC_MXU_D32ACCM");
25816 generate_exception_end(ctx
, EXCP_RI
);
25818 case OPC_MXU_D32ASUM
:
25819 /* TODO: Implement emulation of D32ASUM instruction. */
25820 MIPS_INVAL("OPC_MXU_D32ASUM");
25821 generate_exception_end(ctx
, EXCP_RI
);
25824 MIPS_INVAL("decode_opc_mxu");
25825 generate_exception_end(ctx
, EXCP_RI
);
25832 * Decode MXU pool13
25834 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25835 * +-----------+---+---+-------+-------+-------+-------+-----------+
25836 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL13|
25837 * +-----------+---+---+-------+-------+-------+-------+-----------+
25840 static void decode_opc_mxu__pool13(CPUMIPSState
*env
, DisasContext
*ctx
)
25842 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25845 case OPC_MXU_Q16ACC
:
25846 /* TODO: Implement emulation of Q16ACC instruction. */
25847 MIPS_INVAL("OPC_MXU_Q16ACC");
25848 generate_exception_end(ctx
, EXCP_RI
);
25850 case OPC_MXU_Q16ACCM
:
25851 /* TODO: Implement emulation of Q16ACCM instruction. */
25852 MIPS_INVAL("OPC_MXU_Q16ACCM");
25853 generate_exception_end(ctx
, EXCP_RI
);
25855 case OPC_MXU_Q16ASUM
:
25856 /* TODO: Implement emulation of Q16ASUM instruction. */
25857 MIPS_INVAL("OPC_MXU_Q16ASUM");
25858 generate_exception_end(ctx
, EXCP_RI
);
25861 MIPS_INVAL("decode_opc_mxu");
25862 generate_exception_end(ctx
, EXCP_RI
);
25869 * Decode MXU pool14
25872 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25873 * +-----------+---+---+-------+-------+-------+-------+-----------+
25874 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL14|
25875 * +-----------+---+---+-------+-------+-------+-------+-----------+
25878 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25879 * +-----------+---+---+-------+-------+-------+-------+-----------+
25880 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL14|
25881 * +-----------+---+---+-------+-------+-------+-------+-----------+
25884 static void decode_opc_mxu__pool14(CPUMIPSState
*env
, DisasContext
*ctx
)
25886 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
25889 case OPC_MXU_Q8ADDE
:
25890 /* TODO: Implement emulation of Q8ADDE instruction. */
25891 MIPS_INVAL("OPC_MXU_Q8ADDE");
25892 generate_exception_end(ctx
, EXCP_RI
);
25894 case OPC_MXU_D8SUM
:
25895 /* TODO: Implement emulation of D8SUM instruction. */
25896 MIPS_INVAL("OPC_MXU_D8SUM");
25897 generate_exception_end(ctx
, EXCP_RI
);
25899 case OPC_MXU_D8SUMC
:
25900 /* TODO: Implement emulation of D8SUMC instruction. */
25901 MIPS_INVAL("OPC_MXU_D8SUMC");
25902 generate_exception_end(ctx
, EXCP_RI
);
25905 MIPS_INVAL("decode_opc_mxu");
25906 generate_exception_end(ctx
, EXCP_RI
);
25913 * Decode MXU pool15
25915 * S32MUL, S32MULU, S32EXTRV:
25916 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25917 * +-----------+---------+---------+---+-------+-------+-----------+
25918 * | SPECIAL2 | rs | rt |x x| XRd | XRa |MXU__POOL15|
25919 * +-----------+---------+---------+---+-------+-------+-----------+
25922 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25923 * +-----------+---------+---------+---+-------+-------+-----------+
25924 * | SPECIAL2 | rb | sft5 |x x| XRd | XRa |MXU__POOL15|
25925 * +-----------+---------+---------+---+-------+-------+-----------+
25928 static void decode_opc_mxu__pool15(CPUMIPSState
*env
, DisasContext
*ctx
)
25930 uint32_t opcode
= extract32(ctx
->opcode
, 14, 2);
25933 case OPC_MXU_S32MUL
:
25934 /* TODO: Implement emulation of S32MUL instruction. */
25935 MIPS_INVAL("OPC_MXU_S32MUL");
25936 generate_exception_end(ctx
, EXCP_RI
);
25938 case OPC_MXU_S32MULU
:
25939 /* TODO: Implement emulation of S32MULU instruction. */
25940 MIPS_INVAL("OPC_MXU_S32MULU");
25941 generate_exception_end(ctx
, EXCP_RI
);
25943 case OPC_MXU_S32EXTR
:
25944 /* TODO: Implement emulation of S32EXTR instruction. */
25945 MIPS_INVAL("OPC_MXU_S32EXTR");
25946 generate_exception_end(ctx
, EXCP_RI
);
25948 case OPC_MXU_S32EXTRV
:
25949 /* TODO: Implement emulation of S32EXTRV instruction. */
25950 MIPS_INVAL("OPC_MXU_S32EXTRV");
25951 generate_exception_end(ctx
, EXCP_RI
);
25954 MIPS_INVAL("decode_opc_mxu");
25955 generate_exception_end(ctx
, EXCP_RI
);
25962 * Decode MXU pool16
25965 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25966 * +-----------+---------+-----+-------+-------+-------+-----------+
25967 * | SPECIAL2 | rb |x x x| XRc | XRb | XRa |MXU__POOL16|
25968 * +-----------+---------+-----+-------+-------+-------+-----------+
25971 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25972 * +-----------+---------+-----+-------+-------+-------+-----------+
25973 * | SPECIAL2 | rs |x x x| XRc | XRb | XRa |MXU__POOL16|
25974 * +-----------+---------+-----+-------+-------+-------+-----------+
25977 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25978 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25979 * | SPECIAL2 | s3 |0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25980 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25983 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25984 * +-----------+-----+---+-----+-------+---------------+-----------+
25985 * | SPECIAL2 |optn3|0 0|x x x| XRc | s8 |MXU__POOL16|
25986 * +-----------+-----+---+-----+-------+---------------+-----------+
25988 * S32NOR, S32AND, S32OR, S32XOR:
25989 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25990 * +-----------+---------+-----+-------+-------+-------+-----------+
25991 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25992 * +-----------+---------+-----+-------+-------+-------+-----------+
25995 static void decode_opc_mxu__pool16(CPUMIPSState
*env
, DisasContext
*ctx
)
25997 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26000 case OPC_MXU_D32SARW
:
26001 /* TODO: Implement emulation of D32SARW instruction. */
26002 MIPS_INVAL("OPC_MXU_D32SARW");
26003 generate_exception_end(ctx
, EXCP_RI
);
26005 case OPC_MXU_S32ALN
:
26006 /* TODO: Implement emulation of S32ALN instruction. */
26007 MIPS_INVAL("OPC_MXU_S32ALN");
26008 generate_exception_end(ctx
, EXCP_RI
);
26010 case OPC_MXU_S32ALNI
:
26011 gen_mxu_S32ALNI(ctx
);
26013 case OPC_MXU_S32LUI
:
26014 /* TODO: Implement emulation of S32LUI instruction. */
26015 MIPS_INVAL("OPC_MXU_S32LUI");
26016 generate_exception_end(ctx
, EXCP_RI
);
26018 case OPC_MXU_S32NOR
:
26019 gen_mxu_S32NOR(ctx
);
26021 case OPC_MXU_S32AND
:
26022 gen_mxu_S32AND(ctx
);
26024 case OPC_MXU_S32OR
:
26025 gen_mxu_S32OR(ctx
);
26027 case OPC_MXU_S32XOR
:
26028 gen_mxu_S32XOR(ctx
);
26031 MIPS_INVAL("decode_opc_mxu");
26032 generate_exception_end(ctx
, EXCP_RI
);
26085 * Decode MXU pool17
26087 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26088 * +-----------+---------+---------+---+---------+-----+-----------+
26089 * | SPECIAL2 | rs | rt |0 0| rd |x x x|MXU__POOL17|
26090 * +-----------+---------+---------+---+---------+-----+-----------+
26047 static void decode_opc_mxu__pool17(CPUMIPSState
*env
, DisasContext
*ctx
)
26049 uint32_t opcode
= extract32(ctx
->opcode
, 6, 2);
26053 /* TODO: Implement emulation of LXW instruction. */
26054 MIPS_INVAL("OPC_MXU_LXW");
26055 generate_exception_end(ctx
, EXCP_RI
);
26058 /* TODO: Implement emulation of LXH instruction. */
26059 MIPS_INVAL("OPC_MXU_LXH");
26060 generate_exception_end(ctx
, EXCP_RI
);
26063 /* TODO: Implement emulation of LXHU instruction. */
26064 MIPS_INVAL("OPC_MXU_LXHU");
26065 generate_exception_end(ctx
, EXCP_RI
);
26068 /* TODO: Implement emulation of LXB instruction. */
26069 MIPS_INVAL("OPC_MXU_LXB");
26070 generate_exception_end(ctx
, EXCP_RI
);
26073 /* TODO: Implement emulation of LXBU instruction. */
26074 MIPS_INVAL("OPC_MXU_LXBU");
26075 generate_exception_end(ctx
, EXCP_RI
);
26078 MIPS_INVAL("decode_opc_mxu");
26079 generate_exception_end(ctx
, EXCP_RI
);
26085 * Decode MXU pool18
26087 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26088 * +-----------+---------+-----+-------+-------+-------+-----------+
26089 * | SPECIAL2 | rb |x x x| XRd | XRa |0 0 0 0|MXU__POOL18|
26090 * +-----------+---------+-----+-------+-------+-------+-----------+
26093 static void decode_opc_mxu__pool18(CPUMIPSState
*env
, DisasContext
*ctx
)
26095 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26098 case OPC_MXU_D32SLLV
:
26099 /* TODO: Implement emulation of D32SLLV instruction. */
26100 MIPS_INVAL("OPC_MXU_D32SLLV");
26101 generate_exception_end(ctx
, EXCP_RI
);
26103 case OPC_MXU_D32SLRV
:
26104 /* TODO: Implement emulation of D32SLRV instruction. */
26105 MIPS_INVAL("OPC_MXU_D32SLRV");
26106 generate_exception_end(ctx
, EXCP_RI
);
26108 case OPC_MXU_D32SARV
:
26109 /* TODO: Implement emulation of D32SARV instruction. */
26110 MIPS_INVAL("OPC_MXU_D32SARV");
26111 generate_exception_end(ctx
, EXCP_RI
);
26113 case OPC_MXU_Q16SLLV
:
26114 /* TODO: Implement emulation of Q16SLLV instruction. */
26115 MIPS_INVAL("OPC_MXU_Q16SLLV");
26116 generate_exception_end(ctx
, EXCP_RI
);
26118 case OPC_MXU_Q16SLRV
:
26119 /* TODO: Implement emulation of Q16SLRV instruction. */
26120 MIPS_INVAL("OPC_MXU_Q16SLRV");
26121 generate_exception_end(ctx
, EXCP_RI
);
26123 case OPC_MXU_Q16SARV
:
26124 /* TODO: Implement emulation of Q16SARV instruction. */
26125 MIPS_INVAL("OPC_MXU_Q16SARV");
26126 generate_exception_end(ctx
, EXCP_RI
);
26129 MIPS_INVAL("decode_opc_mxu");
26130 generate_exception_end(ctx
, EXCP_RI
);
26137 * Decode MXU pool19
26139 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26140 * +-----------+---+---+-------+-------+-------+-------+-----------+
26141 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL19|
26142 * +-----------+---+---+-------+-------+-------+-------+-----------+
26145 static void decode_opc_mxu__pool19(CPUMIPSState
*env
, DisasContext
*ctx
)
26147 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26150 case OPC_MXU_Q8MUL
:
26151 case OPC_MXU_Q8MULSU
:
26152 gen_mxu_q8mul_q8mulsu(ctx
);
26155 MIPS_INVAL("decode_opc_mxu");
26156 generate_exception_end(ctx
, EXCP_RI
);
26163 * Decode MXU pool20
26165 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26166 * +-----------+---------+-----+-------+-------+-------+-----------+
26167 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL20|
26168 * +-----------+---------+-----+-------+-------+-------+-----------+
26171 static void decode_opc_mxu__pool20(CPUMIPSState
*env
, DisasContext
*ctx
)
26173 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26176 case OPC_MXU_Q8MOVZ
:
26177 /* TODO: Implement emulation of Q8MOVZ instruction. */
26178 MIPS_INVAL("OPC_MXU_Q8MOVZ");
26179 generate_exception_end(ctx
, EXCP_RI
);
26181 case OPC_MXU_Q8MOVN
:
26182 /* TODO: Implement emulation of Q8MOVN instruction. */
26183 MIPS_INVAL("OPC_MXU_Q8MOVN");
26184 generate_exception_end(ctx
, EXCP_RI
);
26186 case OPC_MXU_D16MOVZ
:
26187 /* TODO: Implement emulation of D16MOVZ instruction. */
26188 MIPS_INVAL("OPC_MXU_D16MOVZ");
26189 generate_exception_end(ctx
, EXCP_RI
);
26191 case OPC_MXU_D16MOVN
:
26192 /* TODO: Implement emulation of D16MOVN instruction. */
26193 MIPS_INVAL("OPC_MXU_D16MOVN");
26194 generate_exception_end(ctx
, EXCP_RI
);
26196 case OPC_MXU_S32MOVZ
:
26197 /* TODO: Implement emulation of S32MOVZ instruction. */
26198 MIPS_INVAL("OPC_MXU_S32MOVZ");
26199 generate_exception_end(ctx
, EXCP_RI
);
26201 case OPC_MXU_S32MOVN
:
26202 /* TODO: Implement emulation of S32MOVN instruction. */
26203 MIPS_INVAL("OPC_MXU_S32MOVN");
26204 generate_exception_end(ctx
, EXCP_RI
);
26207 MIPS_INVAL("decode_opc_mxu");
26208 generate_exception_end(ctx
, EXCP_RI
);
26215 * Decode MXU pool21
26217 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26218 * +-----------+---+---+-------+-------+-------+-------+-----------+
26219 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL21|
26220 * +-----------+---+---+-------+-------+-------+-------+-----------+
26223 static void decode_opc_mxu__pool21(CPUMIPSState
*env
, DisasContext
*ctx
)
26225 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26228 case OPC_MXU_Q8MAC
:
26229 /* TODO: Implement emulation of Q8MAC instruction. */
26230 MIPS_INVAL("OPC_MXU_Q8MAC");
26231 generate_exception_end(ctx
, EXCP_RI
);
26233 case OPC_MXU_Q8MACSU
:
26234 /* TODO: Implement emulation of Q8MACSU instruction. */
26235 MIPS_INVAL("OPC_MXU_Q8MACSU");
26236 generate_exception_end(ctx
, EXCP_RI
);
26239 MIPS_INVAL("decode_opc_mxu");
26240 generate_exception_end(ctx
, EXCP_RI
);
26247 * Main MXU decoding function
26249 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26250 * +-----------+---------------------------------------+-----------+
26251 * | SPECIAL2 | |x x x x x x|
26252 * +-----------+---------------------------------------+-----------+
26255 static void decode_opc_mxu(CPUMIPSState
*env
, DisasContext
*ctx
)
26258 * TODO: Investigate necessity of including handling of
26259 * CLZ, CLO, SDBB in this function, as they belong to
26260 * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
26262 uint32_t opcode
= extract32(ctx
->opcode
, 0, 6);
26264 if (opcode
== OPC__MXU_MUL
) {
26265 uint32_t rs
, rt
, rd
, op1
;
26267 rs
= extract32(ctx
->opcode
, 21, 5);
26268 rt
= extract32(ctx
->opcode
, 16, 5);
26269 rd
= extract32(ctx
->opcode
, 11, 5);
26270 op1
= MASK_SPECIAL2(ctx
->opcode
);
26272 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26277 if (opcode
== OPC_MXU_S32M2I
) {
26278 gen_mxu_s32m2i(ctx
);
26282 if (opcode
== OPC_MXU_S32I2M
) {
26283 gen_mxu_s32i2m(ctx
);
26288 TCGv t_mxu_cr
= tcg_temp_new();
26289 TCGLabel
*l_exit
= gen_new_label();
26291 gen_load_mxu_cr(t_mxu_cr
);
26292 tcg_gen_andi_tl(t_mxu_cr
, t_mxu_cr
, MXU_CR_MXU_EN
);
26293 tcg_gen_brcondi_tl(TCG_COND_NE
, t_mxu_cr
, MXU_CR_MXU_EN
, l_exit
);
26296 case OPC_MXU_S32MADD
:
26297 /* TODO: Implement emulation of S32MADD instruction. */
26298 MIPS_INVAL("OPC_MXU_S32MADD");
26299 generate_exception_end(ctx
, EXCP_RI
);
26301 case OPC_MXU_S32MADDU
:
26302 /* TODO: Implement emulation of S32MADDU instruction. */
26303 MIPS_INVAL("OPC_MXU_S32MADDU");
26304 generate_exception_end(ctx
, EXCP_RI
);
26306 case OPC_MXU__POOL00
:
26307 decode_opc_mxu__pool00(env
, ctx
);
26309 case OPC_MXU_S32MSUB
:
26310 /* TODO: Implement emulation of S32MSUB instruction. */
26311 MIPS_INVAL("OPC_MXU_S32MSUB");
26312 generate_exception_end(ctx
, EXCP_RI
);
26314 case OPC_MXU_S32MSUBU
:
26315 /* TODO: Implement emulation of S32MSUBU instruction. */
26316 MIPS_INVAL("OPC_MXU_S32MSUBU");
26317 generate_exception_end(ctx
, EXCP_RI
);
26319 case OPC_MXU__POOL01
:
26320 decode_opc_mxu__pool01(env
, ctx
);
26322 case OPC_MXU__POOL02
:
26323 decode_opc_mxu__pool02(env
, ctx
);
26325 case OPC_MXU_D16MUL
:
26326 gen_mxu_d16mul(ctx
);
26328 case OPC_MXU__POOL03
:
26329 decode_opc_mxu__pool03(env
, ctx
);
26331 case OPC_MXU_D16MAC
:
26332 gen_mxu_d16mac(ctx
);
26334 case OPC_MXU_D16MACF
:
26335 /* TODO: Implement emulation of D16MACF instruction. */
26336 MIPS_INVAL("OPC_MXU_D16MACF");
26337 generate_exception_end(ctx
, EXCP_RI
);
26339 case OPC_MXU_D16MADL
:
26340 /* TODO: Implement emulation of D16MADL instruction. */
26341 MIPS_INVAL("OPC_MXU_D16MADL");
26342 generate_exception_end(ctx
, EXCP_RI
);
26344 case OPC_MXU_S16MAD
:
26345 /* TODO: Implement emulation of S16MAD instruction. */
26346 MIPS_INVAL("OPC_MXU_S16MAD");
26347 generate_exception_end(ctx
, EXCP_RI
);
26349 case OPC_MXU_Q16ADD
:
26350 /* TODO: Implement emulation of Q16ADD instruction. */
26351 MIPS_INVAL("OPC_MXU_Q16ADD");
26352 generate_exception_end(ctx
, EXCP_RI
);
26354 case OPC_MXU_D16MACE
:
26355 /* TODO: Implement emulation of D16MACE instruction. */
26356 MIPS_INVAL("OPC_MXU_D16MACE");
26357 generate_exception_end(ctx
, EXCP_RI
);
26359 case OPC_MXU__POOL04
:
26360 decode_opc_mxu__pool04(env
, ctx
);
26362 case OPC_MXU__POOL05
:
26363 decode_opc_mxu__pool05(env
, ctx
);
26365 case OPC_MXU__POOL06
:
26366 decode_opc_mxu__pool06(env
, ctx
);
26368 case OPC_MXU__POOL07
:
26369 decode_opc_mxu__pool07(env
, ctx
);
26371 case OPC_MXU__POOL08
:
26372 decode_opc_mxu__pool08(env
, ctx
);
26374 case OPC_MXU__POOL09
:
26375 decode_opc_mxu__pool09(env
, ctx
);
26377 case OPC_MXU__POOL10
:
26378 decode_opc_mxu__pool10(env
, ctx
);
26380 case OPC_MXU__POOL11
:
26381 decode_opc_mxu__pool11(env
, ctx
);
26383 case OPC_MXU_D32ADD
:
26384 /* TODO: Implement emulation of D32ADD instruction. */
26385 MIPS_INVAL("OPC_MXU_D32ADD");
26386 generate_exception_end(ctx
, EXCP_RI
);
26388 case OPC_MXU__POOL12
:
26389 decode_opc_mxu__pool12(env
, ctx
);
26391 case OPC_MXU__POOL13
:
26392 decode_opc_mxu__pool13(env
, ctx
);
26394 case OPC_MXU__POOL14
:
26395 decode_opc_mxu__pool14(env
, ctx
);
26397 case OPC_MXU_Q8ACCE
:
26398 /* TODO: Implement emulation of Q8ACCE instruction. */
26399 MIPS_INVAL("OPC_MXU_Q8ACCE");
26400 generate_exception_end(ctx
, EXCP_RI
);
26402 case OPC_MXU_S8LDD
:
26403 gen_mxu_s8ldd(ctx
);
26405 case OPC_MXU_S8STD
:
26406 /* TODO: Implement emulation of S8STD instruction. */
26407 MIPS_INVAL("OPC_MXU_S8STD");
26408 generate_exception_end(ctx
, EXCP_RI
);
26410 case OPC_MXU_S8LDI
:
26411 /* TODO: Implement emulation of S8LDI instruction. */
26412 MIPS_INVAL("OPC_MXU_S8LDI");
26413 generate_exception_end(ctx
, EXCP_RI
);
26415 case OPC_MXU_S8SDI
:
26416 /* TODO: Implement emulation of S8SDI instruction. */
26417 MIPS_INVAL("OPC_MXU_S8SDI");
26418 generate_exception_end(ctx
, EXCP_RI
);
26420 case OPC_MXU__POOL15
:
26421 decode_opc_mxu__pool15(env
, ctx
);
26423 case OPC_MXU__POOL16
:
26424 decode_opc_mxu__pool16(env
, ctx
);
26426 case OPC_MXU__POOL17
:
26427 decode_opc_mxu__pool17(env
, ctx
);
26429 case OPC_MXU_S16LDD
:
26430 /* TODO: Implement emulation of S16LDD instruction. */
26431 MIPS_INVAL("OPC_MXU_S16LDD");
26432 generate_exception_end(ctx
, EXCP_RI
);
26434 case OPC_MXU_S16STD
:
26435 /* TODO: Implement emulation of S16STD instruction. */
26436 MIPS_INVAL("OPC_MXU_S16STD");
26437 generate_exception_end(ctx
, EXCP_RI
);
26439 case OPC_MXU_S16LDI
:
26440 /* TODO: Implement emulation of S16LDI instruction. */
26441 MIPS_INVAL("OPC_MXU_S16LDI");
26442 generate_exception_end(ctx
, EXCP_RI
);
26444 case OPC_MXU_S16SDI
:
26445 /* TODO: Implement emulation of S16SDI instruction. */
26446 MIPS_INVAL("OPC_MXU_S16SDI");
26447 generate_exception_end(ctx
, EXCP_RI
);
26449 case OPC_MXU_D32SLL
:
26450 /* TODO: Implement emulation of D32SLL instruction. */
26451 MIPS_INVAL("OPC_MXU_D32SLL");
26452 generate_exception_end(ctx
, EXCP_RI
);
26454 case OPC_MXU_D32SLR
:
26455 /* TODO: Implement emulation of D32SLR instruction. */
26456 MIPS_INVAL("OPC_MXU_D32SLR");
26457 generate_exception_end(ctx
, EXCP_RI
);
26459 case OPC_MXU_D32SARL
:
26460 /* TODO: Implement emulation of D32SARL instruction. */
26461 MIPS_INVAL("OPC_MXU_D32SARL");
26462 generate_exception_end(ctx
, EXCP_RI
);
26464 case OPC_MXU_D32SAR
:
26465 /* TODO: Implement emulation of D32SAR instruction. */
26466 MIPS_INVAL("OPC_MXU_D32SAR");
26467 generate_exception_end(ctx
, EXCP_RI
);
26469 case OPC_MXU_Q16SLL
:
26470 /* TODO: Implement emulation of Q16SLL instruction. */
26471 MIPS_INVAL("OPC_MXU_Q16SLL");
26472 generate_exception_end(ctx
, EXCP_RI
);
26474 case OPC_MXU_Q16SLR
:
26475 /* TODO: Implement emulation of Q16SLR instruction. */
26476 MIPS_INVAL("OPC_MXU_Q16SLR");
26477 generate_exception_end(ctx
, EXCP_RI
);
26479 case OPC_MXU__POOL18
:
26480 decode_opc_mxu__pool18(env
, ctx
);
26482 case OPC_MXU_Q16SAR
:
26483 /* TODO: Implement emulation of Q16SAR instruction. */
26484 MIPS_INVAL("OPC_MXU_Q16SAR");
26485 generate_exception_end(ctx
, EXCP_RI
);
26487 case OPC_MXU__POOL19
:
26488 decode_opc_mxu__pool19(env
, ctx
);
26490 case OPC_MXU__POOL20
:
26491 decode_opc_mxu__pool20(env
, ctx
);
26493 case OPC_MXU__POOL21
:
26494 decode_opc_mxu__pool21(env
, ctx
);
26496 case OPC_MXU_Q16SCOP
:
26497 /* TODO: Implement emulation of Q16SCOP instruction. */
26498 MIPS_INVAL("OPC_MXU_Q16SCOP");
26499 generate_exception_end(ctx
, EXCP_RI
);
26501 case OPC_MXU_Q8MADL
:
26502 /* TODO: Implement emulation of Q8MADL instruction. */
26503 MIPS_INVAL("OPC_MXU_Q8MADL");
26504 generate_exception_end(ctx
, EXCP_RI
);
26506 case OPC_MXU_S32SFL
:
26507 /* TODO: Implement emulation of S32SFL instruction. */
26508 MIPS_INVAL("OPC_MXU_S32SFL");
26509 generate_exception_end(ctx
, EXCP_RI
);
26511 case OPC_MXU_Q8SAD
:
26512 /* TODO: Implement emulation of Q8SAD instruction. */
26513 MIPS_INVAL("OPC_MXU_Q8SAD");
26514 generate_exception_end(ctx
, EXCP_RI
);
26517 MIPS_INVAL("decode_opc_mxu");
26518 generate_exception_end(ctx
, EXCP_RI
);
26521 gen_set_label(l_exit
);
26522 tcg_temp_free(t_mxu_cr
);
26526 #endif /* !defined(TARGET_MIPS64) */
26529 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26534 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26536 rs
= (ctx
->opcode
>> 21) & 0x1f;
26537 rt
= (ctx
->opcode
>> 16) & 0x1f;
26538 rd
= (ctx
->opcode
>> 11) & 0x1f;
26540 op1
= MASK_SPECIAL2(ctx
->opcode
);
26542 case OPC_MADD
: /* Multiply and add/sub */
26546 check_insn(ctx
, ISA_MIPS32
);
26547 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
26550 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26553 case OPC_DIVU_G_2F
:
26554 case OPC_MULT_G_2F
:
26555 case OPC_MULTU_G_2F
:
26557 case OPC_MODU_G_2F
:
26558 check_insn(ctx
, INSN_LOONGSON2F
);
26559 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26563 check_insn(ctx
, ISA_MIPS32
);
26564 gen_cl(ctx
, op1
, rd
, rs
);
26567 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
26568 gen_helper_do_semihosting(cpu_env
);
26570 /* XXX: not clear which exception should be raised
26571 * when in debug mode...
26573 check_insn(ctx
, ISA_MIPS32
);
26574 generate_exception_end(ctx
, EXCP_DBp
);
26577 #if defined(TARGET_MIPS64)
26580 check_insn(ctx
, ISA_MIPS64
);
26581 check_mips_64(ctx
);
26582 gen_cl(ctx
, op1
, rd
, rs
);
26584 case OPC_DMULT_G_2F
:
26585 case OPC_DMULTU_G_2F
:
26586 case OPC_DDIV_G_2F
:
26587 case OPC_DDIVU_G_2F
:
26588 case OPC_DMOD_G_2F
:
26589 case OPC_DMODU_G_2F
:
26590 check_insn(ctx
, INSN_LOONGSON2F
);
26591 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26594 default: /* Invalid */
26595 MIPS_INVAL("special2_legacy");
26596 generate_exception_end(ctx
, EXCP_RI
);
26601 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
26603 int rs
, rt
, rd
, sa
;
26607 rs
= (ctx
->opcode
>> 21) & 0x1f;
26608 rt
= (ctx
->opcode
>> 16) & 0x1f;
26609 rd
= (ctx
->opcode
>> 11) & 0x1f;
26610 sa
= (ctx
->opcode
>> 6) & 0x1f;
26611 imm
= (int16_t)ctx
->opcode
>> 7;
26613 op1
= MASK_SPECIAL3(ctx
->opcode
);
26617 /* hint codes 24-31 are reserved and signal RI */
26618 generate_exception_end(ctx
, EXCP_RI
);
26620 /* Treat as NOP. */
26623 check_cp0_enabled(ctx
);
26624 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26625 gen_cache_operation(ctx
, rt
, rs
, imm
);
26629 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
26632 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26637 /* Treat as NOP. */
26640 op2
= MASK_BSHFL(ctx
->opcode
);
26646 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
26649 gen_bitswap(ctx
, op2
, rd
, rt
);
26654 #if defined(TARGET_MIPS64)
26656 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
26659 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26662 check_mips_64(ctx
);
26665 /* Treat as NOP. */
26668 op2
= MASK_DBSHFL(ctx
->opcode
);
26678 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
26681 gen_bitswap(ctx
, op2
, rd
, rt
);
26688 default: /* Invalid */
26689 MIPS_INVAL("special3_r6");
26690 generate_exception_end(ctx
, EXCP_RI
);
26695 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26700 rs
= (ctx
->opcode
>> 21) & 0x1f;
26701 rt
= (ctx
->opcode
>> 16) & 0x1f;
26702 rd
= (ctx
->opcode
>> 11) & 0x1f;
26704 op1
= MASK_SPECIAL3(ctx
->opcode
);
26707 case OPC_DIVU_G_2E
:
26709 case OPC_MODU_G_2E
:
26710 case OPC_MULT_G_2E
:
26711 case OPC_MULTU_G_2E
:
26712 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
26713 * the same mask and op1. */
26714 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
26715 op2
= MASK_ADDUH_QB(ctx
->opcode
);
26718 case OPC_ADDUH_R_QB
:
26720 case OPC_ADDQH_R_PH
:
26722 case OPC_ADDQH_R_W
:
26724 case OPC_SUBUH_R_QB
:
26726 case OPC_SUBQH_R_PH
:
26728 case OPC_SUBQH_R_W
:
26729 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26734 case OPC_MULQ_RS_W
:
26735 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26738 MIPS_INVAL("MASK ADDUH.QB");
26739 generate_exception_end(ctx
, EXCP_RI
);
26742 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
26743 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26745 generate_exception_end(ctx
, EXCP_RI
);
26749 op2
= MASK_LX(ctx
->opcode
);
26751 #if defined(TARGET_MIPS64)
26757 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
26759 default: /* Invalid */
26760 MIPS_INVAL("MASK LX");
26761 generate_exception_end(ctx
, EXCP_RI
);
26765 case OPC_ABSQ_S_PH_DSP
:
26766 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
26768 case OPC_ABSQ_S_QB
:
26769 case OPC_ABSQ_S_PH
:
26771 case OPC_PRECEQ_W_PHL
:
26772 case OPC_PRECEQ_W_PHR
:
26773 case OPC_PRECEQU_PH_QBL
:
26774 case OPC_PRECEQU_PH_QBR
:
26775 case OPC_PRECEQU_PH_QBLA
:
26776 case OPC_PRECEQU_PH_QBRA
:
26777 case OPC_PRECEU_PH_QBL
:
26778 case OPC_PRECEU_PH_QBR
:
26779 case OPC_PRECEU_PH_QBLA
:
26780 case OPC_PRECEU_PH_QBRA
:
26781 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26788 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26791 MIPS_INVAL("MASK ABSQ_S.PH");
26792 generate_exception_end(ctx
, EXCP_RI
);
26796 case OPC_ADDU_QB_DSP
:
26797 op2
= MASK_ADDU_QB(ctx
->opcode
);
26800 case OPC_ADDQ_S_PH
:
26803 case OPC_ADDU_S_QB
:
26805 case OPC_ADDU_S_PH
:
26807 case OPC_SUBQ_S_PH
:
26810 case OPC_SUBU_S_QB
:
26812 case OPC_SUBU_S_PH
:
26816 case OPC_RADDU_W_QB
:
26817 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26819 case OPC_MULEU_S_PH_QBL
:
26820 case OPC_MULEU_S_PH_QBR
:
26821 case OPC_MULQ_RS_PH
:
26822 case OPC_MULEQ_S_W_PHL
:
26823 case OPC_MULEQ_S_W_PHR
:
26824 case OPC_MULQ_S_PH
:
26825 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26827 default: /* Invalid */
26828 MIPS_INVAL("MASK ADDU.QB");
26829 generate_exception_end(ctx
, EXCP_RI
);
26834 case OPC_CMPU_EQ_QB_DSP
:
26835 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
26837 case OPC_PRECR_SRA_PH_W
:
26838 case OPC_PRECR_SRA_R_PH_W
:
26839 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26841 case OPC_PRECR_QB_PH
:
26842 case OPC_PRECRQ_QB_PH
:
26843 case OPC_PRECRQ_PH_W
:
26844 case OPC_PRECRQ_RS_PH_W
:
26845 case OPC_PRECRQU_S_QB_PH
:
26846 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26848 case OPC_CMPU_EQ_QB
:
26849 case OPC_CMPU_LT_QB
:
26850 case OPC_CMPU_LE_QB
:
26851 case OPC_CMP_EQ_PH
:
26852 case OPC_CMP_LT_PH
:
26853 case OPC_CMP_LE_PH
:
26854 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26856 case OPC_CMPGU_EQ_QB
:
26857 case OPC_CMPGU_LT_QB
:
26858 case OPC_CMPGU_LE_QB
:
26859 case OPC_CMPGDU_EQ_QB
:
26860 case OPC_CMPGDU_LT_QB
:
26861 case OPC_CMPGDU_LE_QB
:
26864 case OPC_PACKRL_PH
:
26865 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26867 default: /* Invalid */
26868 MIPS_INVAL("MASK CMPU.EQ.QB");
26869 generate_exception_end(ctx
, EXCP_RI
);
26873 case OPC_SHLL_QB_DSP
:
26874 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26876 case OPC_DPA_W_PH_DSP
:
26877 op2
= MASK_DPA_W_PH(ctx
->opcode
);
26879 case OPC_DPAU_H_QBL
:
26880 case OPC_DPAU_H_QBR
:
26881 case OPC_DPSU_H_QBL
:
26882 case OPC_DPSU_H_QBR
:
26884 case OPC_DPAX_W_PH
:
26885 case OPC_DPAQ_S_W_PH
:
26886 case OPC_DPAQX_S_W_PH
:
26887 case OPC_DPAQX_SA_W_PH
:
26889 case OPC_DPSX_W_PH
:
26890 case OPC_DPSQ_S_W_PH
:
26891 case OPC_DPSQX_S_W_PH
:
26892 case OPC_DPSQX_SA_W_PH
:
26893 case OPC_MULSAQ_S_W_PH
:
26894 case OPC_DPAQ_SA_L_W
:
26895 case OPC_DPSQ_SA_L_W
:
26896 case OPC_MAQ_S_W_PHL
:
26897 case OPC_MAQ_S_W_PHR
:
26898 case OPC_MAQ_SA_W_PHL
:
26899 case OPC_MAQ_SA_W_PHR
:
26900 case OPC_MULSA_W_PH
:
26901 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26903 default: /* Invalid */
26904 MIPS_INVAL("MASK DPAW.PH");
26905 generate_exception_end(ctx
, EXCP_RI
);
26910 op2
= MASK_INSV(ctx
->opcode
);
26921 t0
= tcg_temp_new();
26922 t1
= tcg_temp_new();
26924 gen_load_gpr(t0
, rt
);
26925 gen_load_gpr(t1
, rs
);
26927 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
26933 default: /* Invalid */
26934 MIPS_INVAL("MASK INSV");
26935 generate_exception_end(ctx
, EXCP_RI
);
26939 case OPC_APPEND_DSP
:
26940 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
26942 case OPC_EXTR_W_DSP
:
26943 op2
= MASK_EXTR_W(ctx
->opcode
);
26947 case OPC_EXTR_RS_W
:
26949 case OPC_EXTRV_S_H
:
26951 case OPC_EXTRV_R_W
:
26952 case OPC_EXTRV_RS_W
:
26957 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
26960 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26966 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26968 default: /* Invalid */
26969 MIPS_INVAL("MASK EXTR.W");
26970 generate_exception_end(ctx
, EXCP_RI
);
26974 #if defined(TARGET_MIPS64)
26975 case OPC_DDIV_G_2E
:
26976 case OPC_DDIVU_G_2E
:
26977 case OPC_DMULT_G_2E
:
26978 case OPC_DMULTU_G_2E
:
26979 case OPC_DMOD_G_2E
:
26980 case OPC_DMODU_G_2E
:
26981 check_insn(ctx
, INSN_LOONGSON2E
);
26982 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26984 case OPC_ABSQ_S_QH_DSP
:
26985 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
26987 case OPC_PRECEQ_L_PWL
:
26988 case OPC_PRECEQ_L_PWR
:
26989 case OPC_PRECEQ_PW_QHL
:
26990 case OPC_PRECEQ_PW_QHR
:
26991 case OPC_PRECEQ_PW_QHLA
:
26992 case OPC_PRECEQ_PW_QHRA
:
26993 case OPC_PRECEQU_QH_OBL
:
26994 case OPC_PRECEQU_QH_OBR
:
26995 case OPC_PRECEQU_QH_OBLA
:
26996 case OPC_PRECEQU_QH_OBRA
:
26997 case OPC_PRECEU_QH_OBL
:
26998 case OPC_PRECEU_QH_OBR
:
26999 case OPC_PRECEU_QH_OBLA
:
27000 case OPC_PRECEU_QH_OBRA
:
27001 case OPC_ABSQ_S_OB
:
27002 case OPC_ABSQ_S_PW
:
27003 case OPC_ABSQ_S_QH
:
27004 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27012 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27014 default: /* Invalid */
27015 MIPS_INVAL("MASK ABSQ_S.QH");
27016 generate_exception_end(ctx
, EXCP_RI
);
27020 case OPC_ADDU_OB_DSP
:
27021 op2
= MASK_ADDU_OB(ctx
->opcode
);
27023 case OPC_RADDU_L_OB
:
27025 case OPC_SUBQ_S_PW
:
27027 case OPC_SUBQ_S_QH
:
27029 case OPC_SUBU_S_OB
:
27031 case OPC_SUBU_S_QH
:
27033 case OPC_SUBUH_R_OB
:
27035 case OPC_ADDQ_S_PW
:
27037 case OPC_ADDQ_S_QH
:
27039 case OPC_ADDU_S_OB
:
27041 case OPC_ADDU_S_QH
:
27043 case OPC_ADDUH_R_OB
:
27044 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27046 case OPC_MULEQ_S_PW_QHL
:
27047 case OPC_MULEQ_S_PW_QHR
:
27048 case OPC_MULEU_S_QH_OBL
:
27049 case OPC_MULEU_S_QH_OBR
:
27050 case OPC_MULQ_RS_QH
:
27051 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27053 default: /* Invalid */
27054 MIPS_INVAL("MASK ADDU.OB");
27055 generate_exception_end(ctx
, EXCP_RI
);
27059 case OPC_CMPU_EQ_OB_DSP
:
27060 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
27062 case OPC_PRECR_SRA_QH_PW
:
27063 case OPC_PRECR_SRA_R_QH_PW
:
27064 /* Return value is rt. */
27065 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27067 case OPC_PRECR_OB_QH
:
27068 case OPC_PRECRQ_OB_QH
:
27069 case OPC_PRECRQ_PW_L
:
27070 case OPC_PRECRQ_QH_PW
:
27071 case OPC_PRECRQ_RS_QH_PW
:
27072 case OPC_PRECRQU_S_OB_QH
:
27073 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27075 case OPC_CMPU_EQ_OB
:
27076 case OPC_CMPU_LT_OB
:
27077 case OPC_CMPU_LE_OB
:
27078 case OPC_CMP_EQ_QH
:
27079 case OPC_CMP_LT_QH
:
27080 case OPC_CMP_LE_QH
:
27081 case OPC_CMP_EQ_PW
:
27082 case OPC_CMP_LT_PW
:
27083 case OPC_CMP_LE_PW
:
27084 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27086 case OPC_CMPGDU_EQ_OB
:
27087 case OPC_CMPGDU_LT_OB
:
27088 case OPC_CMPGDU_LE_OB
:
27089 case OPC_CMPGU_EQ_OB
:
27090 case OPC_CMPGU_LT_OB
:
27091 case OPC_CMPGU_LE_OB
:
27092 case OPC_PACKRL_PW
:
27096 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27098 default: /* Invalid */
27099 MIPS_INVAL("MASK CMPU_EQ.OB");
27100 generate_exception_end(ctx
, EXCP_RI
);
27104 case OPC_DAPPEND_DSP
:
27105 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27107 case OPC_DEXTR_W_DSP
:
27108 op2
= MASK_DEXTR_W(ctx
->opcode
);
27115 case OPC_DEXTR_R_L
:
27116 case OPC_DEXTR_RS_L
:
27118 case OPC_DEXTR_R_W
:
27119 case OPC_DEXTR_RS_W
:
27120 case OPC_DEXTR_S_H
:
27122 case OPC_DEXTRV_R_L
:
27123 case OPC_DEXTRV_RS_L
:
27124 case OPC_DEXTRV_S_H
:
27126 case OPC_DEXTRV_R_W
:
27127 case OPC_DEXTRV_RS_W
:
27128 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27133 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27135 default: /* Invalid */
27136 MIPS_INVAL("MASK EXTR.W");
27137 generate_exception_end(ctx
, EXCP_RI
);
27141 case OPC_DPAQ_W_QH_DSP
:
27142 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
27144 case OPC_DPAU_H_OBL
:
27145 case OPC_DPAU_H_OBR
:
27146 case OPC_DPSU_H_OBL
:
27147 case OPC_DPSU_H_OBR
:
27149 case OPC_DPAQ_S_W_QH
:
27151 case OPC_DPSQ_S_W_QH
:
27152 case OPC_MULSAQ_S_W_QH
:
27153 case OPC_DPAQ_SA_L_PW
:
27154 case OPC_DPSQ_SA_L_PW
:
27155 case OPC_MULSAQ_S_L_PW
:
27156 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27158 case OPC_MAQ_S_W_QHLL
:
27159 case OPC_MAQ_S_W_QHLR
:
27160 case OPC_MAQ_S_W_QHRL
:
27161 case OPC_MAQ_S_W_QHRR
:
27162 case OPC_MAQ_SA_W_QHLL
:
27163 case OPC_MAQ_SA_W_QHLR
:
27164 case OPC_MAQ_SA_W_QHRL
:
27165 case OPC_MAQ_SA_W_QHRR
:
27166 case OPC_MAQ_S_L_PWL
:
27167 case OPC_MAQ_S_L_PWR
:
27172 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27174 default: /* Invalid */
27175 MIPS_INVAL("MASK DPAQ.W.QH");
27176 generate_exception_end(ctx
, EXCP_RI
);
27180 case OPC_DINSV_DSP
:
27181 op2
= MASK_INSV(ctx
->opcode
);
27192 t0
= tcg_temp_new();
27193 t1
= tcg_temp_new();
27195 gen_load_gpr(t0
, rt
);
27196 gen_load_gpr(t1
, rs
);
27198 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27204 default: /* Invalid */
27205 MIPS_INVAL("MASK DINSV");
27206 generate_exception_end(ctx
, EXCP_RI
);
27210 case OPC_SHLL_OB_DSP
:
27211 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27214 default: /* Invalid */
27215 MIPS_INVAL("special3_legacy");
27216 generate_exception_end(ctx
, EXCP_RI
);
27221 static void decode_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
27223 uint32_t opc
= MASK_MMI0(ctx
->opcode
);
27226 case MMI_OPC_0_PADDW
: /* TODO: MMI_OPC_0_PADDW */
27227 case MMI_OPC_0_PSUBW
: /* TODO: MMI_OPC_0_PSUBW */
27228 case MMI_OPC_0_PCGTW
: /* TODO: MMI_OPC_0_PCGTW */
27229 case MMI_OPC_0_PMAXW
: /* TODO: MMI_OPC_0_PMAXW */
27230 case MMI_OPC_0_PADDH
: /* TODO: MMI_OPC_0_PADDH */
27231 case MMI_OPC_0_PSUBH
: /* TODO: MMI_OPC_0_PSUBH */
27232 case MMI_OPC_0_PCGTH
: /* TODO: MMI_OPC_0_PCGTH */
27233 case MMI_OPC_0_PMAXH
: /* TODO: MMI_OPC_0_PMAXH */
27234 case MMI_OPC_0_PADDB
: /* TODO: MMI_OPC_0_PADDB */
27235 case MMI_OPC_0_PSUBB
: /* TODO: MMI_OPC_0_PSUBB */
27236 case MMI_OPC_0_PCGTB
: /* TODO: MMI_OPC_0_PCGTB */
27237 case MMI_OPC_0_PADDSW
: /* TODO: MMI_OPC_0_PADDSW */
27238 case MMI_OPC_0_PSUBSW
: /* TODO: MMI_OPC_0_PSUBSW */
27239 case MMI_OPC_0_PEXTLW
: /* TODO: MMI_OPC_0_PEXTLW */
27240 case MMI_OPC_0_PPACW
: /* TODO: MMI_OPC_0_PPACW */
27241 case MMI_OPC_0_PADDSH
: /* TODO: MMI_OPC_0_PADDSH */
27242 case MMI_OPC_0_PSUBSH
: /* TODO: MMI_OPC_0_PSUBSH */
27243 case MMI_OPC_0_PEXTLH
: /* TODO: MMI_OPC_0_PEXTLH */
27244 case MMI_OPC_0_PPACH
: /* TODO: MMI_OPC_0_PPACH */
27245 case MMI_OPC_0_PADDSB
: /* TODO: MMI_OPC_0_PADDSB */
27246 case MMI_OPC_0_PSUBSB
: /* TODO: MMI_OPC_0_PSUBSB */
27247 case MMI_OPC_0_PEXTLB
: /* TODO: MMI_OPC_0_PEXTLB */
27248 case MMI_OPC_0_PPACB
: /* TODO: MMI_OPC_0_PPACB */
27249 case MMI_OPC_0_PEXT5
: /* TODO: MMI_OPC_0_PEXT5 */
27250 case MMI_OPC_0_PPAC5
: /* TODO: MMI_OPC_0_PPAC5 */
27251 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI0 */
27254 MIPS_INVAL("TX79 MMI class MMI0");
27255 generate_exception_end(ctx
, EXCP_RI
);
27260 static void decode_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
27262 uint32_t opc
= MASK_MMI1(ctx
->opcode
);
27265 case MMI_OPC_1_PABSW
: /* TODO: MMI_OPC_1_PABSW */
27266 case MMI_OPC_1_PCEQW
: /* TODO: MMI_OPC_1_PCEQW */
27267 case MMI_OPC_1_PMINW
: /* TODO: MMI_OPC_1_PMINW */
27268 case MMI_OPC_1_PADSBH
: /* TODO: MMI_OPC_1_PADSBH */
27269 case MMI_OPC_1_PABSH
: /* TODO: MMI_OPC_1_PABSH */
27270 case MMI_OPC_1_PCEQH
: /* TODO: MMI_OPC_1_PCEQH */
27271 case MMI_OPC_1_PMINH
: /* TODO: MMI_OPC_1_PMINH */
27272 case MMI_OPC_1_PCEQB
: /* TODO: MMI_OPC_1_PCEQB */
27273 case MMI_OPC_1_PADDUW
: /* TODO: MMI_OPC_1_PADDUW */
27274 case MMI_OPC_1_PSUBUW
: /* TODO: MMI_OPC_1_PSUBUW */
27275 case MMI_OPC_1_PEXTUW
: /* TODO: MMI_OPC_1_PEXTUW */
27276 case MMI_OPC_1_PADDUH
: /* TODO: MMI_OPC_1_PADDUH */
27277 case MMI_OPC_1_PSUBUH
: /* TODO: MMI_OPC_1_PSUBUH */
27278 case MMI_OPC_1_PEXTUH
: /* TODO: MMI_OPC_1_PEXTUH */
27279 case MMI_OPC_1_PADDUB
: /* TODO: MMI_OPC_1_PADDUB */
27280 case MMI_OPC_1_PSUBUB
: /* TODO: MMI_OPC_1_PSUBUB */
27281 case MMI_OPC_1_PEXTUB
: /* TODO: MMI_OPC_1_PEXTUB */
27282 case MMI_OPC_1_QFSRV
: /* TODO: MMI_OPC_1_QFSRV */
27283 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI1 */
27286 MIPS_INVAL("TX79 MMI class MMI1");
27287 generate_exception_end(ctx
, EXCP_RI
);
27292 static void decode_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
27294 uint32_t opc
= MASK_MMI2(ctx
->opcode
);
27297 case MMI_OPC_2_PMADDW
: /* TODO: MMI_OPC_2_PMADDW */
27298 case MMI_OPC_2_PSLLVW
: /* TODO: MMI_OPC_2_PSLLVW */
27299 case MMI_OPC_2_PSRLVW
: /* TODO: MMI_OPC_2_PSRLVW */
27300 case MMI_OPC_2_PMSUBW
: /* TODO: MMI_OPC_2_PMSUBW */
27301 case MMI_OPC_2_PMFHI
: /* TODO: MMI_OPC_2_PMFHI */
27302 case MMI_OPC_2_PMFLO
: /* TODO: MMI_OPC_2_PMFLO */
27303 case MMI_OPC_2_PINTH
: /* TODO: MMI_OPC_2_PINTH */
27304 case MMI_OPC_2_PMULTW
: /* TODO: MMI_OPC_2_PMULTW */
27305 case MMI_OPC_2_PDIVW
: /* TODO: MMI_OPC_2_PDIVW */
27306 case MMI_OPC_2_PCPYLD
: /* TODO: MMI_OPC_2_PCPYLD */
27307 case MMI_OPC_2_PMADDH
: /* TODO: MMI_OPC_2_PMADDH */
27308 case MMI_OPC_2_PHMADH
: /* TODO: MMI_OPC_2_PHMADH */
27309 case MMI_OPC_2_PAND
: /* TODO: MMI_OPC_2_PAND */
27310 case MMI_OPC_2_PXOR
: /* TODO: MMI_OPC_2_PXOR */
27311 case MMI_OPC_2_PMSUBH
: /* TODO: MMI_OPC_2_PMSUBH */
27312 case MMI_OPC_2_PHMSBH
: /* TODO: MMI_OPC_2_PHMSBH */
27313 case MMI_OPC_2_PEXEH
: /* TODO: MMI_OPC_2_PEXEH */
27314 case MMI_OPC_2_PREVH
: /* TODO: MMI_OPC_2_PREVH */
27315 case MMI_OPC_2_PMULTH
: /* TODO: MMI_OPC_2_PMULTH */
27316 case MMI_OPC_2_PDIVBW
: /* TODO: MMI_OPC_2_PDIVBW */
27317 case MMI_OPC_2_PEXEW
: /* TODO: MMI_OPC_2_PEXEW */
27318 case MMI_OPC_2_PROT3W
: /* TODO: MMI_OPC_2_PROT3W */
27319 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI2 */
27322 MIPS_INVAL("TX79 MMI class MMI2");
27323 generate_exception_end(ctx
, EXCP_RI
);
27328 static void decode_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
27330 uint32_t opc
= MASK_MMI3(ctx
->opcode
);
27333 case MMI_OPC_3_PMADDUW
: /* TODO: MMI_OPC_3_PMADDUW */
27334 case MMI_OPC_3_PSRAVW
: /* TODO: MMI_OPC_3_PSRAVW */
27335 case MMI_OPC_3_PMTHI
: /* TODO: MMI_OPC_3_PMTHI */
27336 case MMI_OPC_3_PMTLO
: /* TODO: MMI_OPC_3_PMTLO */
27337 case MMI_OPC_3_PINTEH
: /* TODO: MMI_OPC_3_PINTEH */
27338 case MMI_OPC_3_PMULTUW
: /* TODO: MMI_OPC_3_PMULTUW */
27339 case MMI_OPC_3_PDIVUW
: /* TODO: MMI_OPC_3_PDIVUW */
27340 case MMI_OPC_3_PCPYUD
: /* TODO: MMI_OPC_3_PCPYUD */
27341 case MMI_OPC_3_POR
: /* TODO: MMI_OPC_3_POR */
27342 case MMI_OPC_3_PNOR
: /* TODO: MMI_OPC_3_PNOR */
27343 case MMI_OPC_3_PEXCH
: /* TODO: MMI_OPC_3_PEXCH */
27344 case MMI_OPC_3_PCPYH
: /* TODO: MMI_OPC_3_PCPYH */
27345 case MMI_OPC_3_PEXCW
: /* TODO: MMI_OPC_3_PEXCW */
27346 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI3 */
27349 MIPS_INVAL("TX79 MMI class MMI3");
27350 generate_exception_end(ctx
, EXCP_RI
);
27355 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
27357 uint32_t opc
= MASK_MMI(ctx
->opcode
);
27358 int rs
= extract32(ctx
->opcode
, 21, 5);
27359 int rt
= extract32(ctx
->opcode
, 16, 5);
27360 int rd
= extract32(ctx
->opcode
, 11, 5);
27363 case MMI_OPC_CLASS_MMI0
:
27364 decode_mmi0(env
, ctx
);
27366 case MMI_OPC_CLASS_MMI1
:
27367 decode_mmi1(env
, ctx
);
27369 case MMI_OPC_CLASS_MMI2
:
27370 decode_mmi2(env
, ctx
);
27372 case MMI_OPC_CLASS_MMI3
:
27373 decode_mmi3(env
, ctx
);
27375 case MMI_OPC_MULT1
:
27376 case MMI_OPC_MULTU1
:
27378 case MMI_OPC_MADDU
:
27379 case MMI_OPC_MADD1
:
27380 case MMI_OPC_MADDU1
:
27381 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
27384 case MMI_OPC_DIVU1
:
27385 gen_div1_tx79(ctx
, opc
, rs
, rt
);
27387 case MMI_OPC_MTLO1
:
27388 case MMI_OPC_MTHI1
:
27389 gen_HILO1_tx79(ctx
, opc
, rs
);
27391 case MMI_OPC_MFLO1
:
27392 case MMI_OPC_MFHI1
:
27393 gen_HILO1_tx79(ctx
, opc
, rd
);
27395 case MMI_OPC_PLZCW
: /* TODO: MMI_OPC_PLZCW */
27396 case MMI_OPC_PMFHL
: /* TODO: MMI_OPC_PMFHL */
27397 case MMI_OPC_PMTHL
: /* TODO: MMI_OPC_PMTHL */
27398 case MMI_OPC_PSLLH
: /* TODO: MMI_OPC_PSLLH */
27399 case MMI_OPC_PSRLH
: /* TODO: MMI_OPC_PSRLH */
27400 case MMI_OPC_PSRAH
: /* TODO: MMI_OPC_PSRAH */
27401 case MMI_OPC_PSLLW
: /* TODO: MMI_OPC_PSLLW */
27402 case MMI_OPC_PSRLW
: /* TODO: MMI_OPC_PSRLW */
27403 case MMI_OPC_PSRAW
: /* TODO: MMI_OPC_PSRAW */
27404 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI */
27407 MIPS_INVAL("TX79 MMI class");
27408 generate_exception_end(ctx
, EXCP_RI
);
27413 static void gen_mmi_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
27415 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_LQ */
27418 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
27420 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_SQ */
27424 * The TX79-specific instruction Store Quadword
27426 * +--------+-------+-------+------------------------+
27427 * | 011111 | base | rt | offset | SQ
27428 * +--------+-------+-------+------------------------+
27431 * has the same opcode as the Read Hardware Register instruction
27433 * +--------+-------+-------+-------+-------+--------+
27434 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
27435 * +--------+-------+-------+-------+-------+--------+
27438 * that is required, trapped and emulated by the Linux kernel. However, all
27439 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
27440 * offset is odd. Therefore all valid SQ instructions can execute normally.
27441 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
27442 * between SQ and RDHWR, as the Linux kernel does.
27444 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
27446 int base
= extract32(ctx
->opcode
, 21, 5);
27447 int rt
= extract32(ctx
->opcode
, 16, 5);
27448 int offset
= extract32(ctx
->opcode
, 0, 16);
27450 #ifdef CONFIG_USER_ONLY
27451 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
27452 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
27454 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
27455 int rd
= extract32(ctx
->opcode
, 11, 5);
27457 gen_rdhwr(ctx
, rt
, rd
, 0);
27462 gen_mmi_sq(ctx
, base
, rt
, offset
);
27465 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
27467 int rs
, rt
, rd
, sa
;
27471 rs
= (ctx
->opcode
>> 21) & 0x1f;
27472 rt
= (ctx
->opcode
>> 16) & 0x1f;
27473 rd
= (ctx
->opcode
>> 11) & 0x1f;
27474 sa
= (ctx
->opcode
>> 6) & 0x1f;
27475 imm
= sextract32(ctx
->opcode
, 7, 9);
27477 op1
= MASK_SPECIAL3(ctx
->opcode
);
27480 * EVA loads and stores overlap Loongson 2E instructions decoded by
27481 * decode_opc_special3_legacy(), so be careful to allow their decoding when
27488 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27496 check_cp0_enabled(ctx
);
27497 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27501 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27506 check_cp0_enabled(ctx
);
27507 gen_st(ctx
, op1
, rt
, rs
, imm
);
27510 check_cp0_enabled(ctx
);
27511 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
27514 check_cp0_enabled(ctx
);
27515 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27516 gen_cache_operation(ctx
, rt
, rs
, imm
);
27518 /* Treat as NOP. */
27521 check_cp0_enabled(ctx
);
27522 /* Treat as NOP. */
27530 check_insn(ctx
, ISA_MIPS32R2
);
27531 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27534 op2
= MASK_BSHFL(ctx
->opcode
);
27541 check_insn(ctx
, ISA_MIPS32R6
);
27542 decode_opc_special3_r6(env
, ctx
);
27545 check_insn(ctx
, ISA_MIPS32R2
);
27546 gen_bshfl(ctx
, op2
, rt
, rd
);
27550 #if defined(TARGET_MIPS64)
27557 check_insn(ctx
, ISA_MIPS64R2
);
27558 check_mips_64(ctx
);
27559 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27562 op2
= MASK_DBSHFL(ctx
->opcode
);
27573 check_insn(ctx
, ISA_MIPS32R6
);
27574 decode_opc_special3_r6(env
, ctx
);
27577 check_insn(ctx
, ISA_MIPS64R2
);
27578 check_mips_64(ctx
);
27579 op2
= MASK_DBSHFL(ctx
->opcode
);
27580 gen_bshfl(ctx
, op2
, rt
, rd
);
27586 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
27591 TCGv t0
= tcg_temp_new();
27592 TCGv t1
= tcg_temp_new();
27594 gen_load_gpr(t0
, rt
);
27595 gen_load_gpr(t1
, rs
);
27596 gen_helper_fork(t0
, t1
);
27604 TCGv t0
= tcg_temp_new();
27606 gen_load_gpr(t0
, rs
);
27607 gen_helper_yield(t0
, cpu_env
, t0
);
27608 gen_store_gpr(t0
, rd
);
27613 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27614 decode_opc_special3_r6(env
, ctx
);
27616 decode_opc_special3_legacy(env
, ctx
);
27621 /* MIPS SIMD Architecture (MSA) */
27622 static inline int check_msa_access(DisasContext
*ctx
)
27624 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
27625 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
27626 generate_exception_end(ctx
, EXCP_RI
);
27630 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
27631 if (ctx
->insn_flags
& ASE_MSA
) {
27632 generate_exception_end(ctx
, EXCP_MSADIS
);
27635 generate_exception_end(ctx
, EXCP_RI
);
27642 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
27644 /* generates tcg ops to check if any element is 0 */
27645 /* Note this function only works with MSA_WRLEN = 128 */
27646 uint64_t eval_zero_or_big
= 0;
27647 uint64_t eval_big
= 0;
27648 TCGv_i64 t0
= tcg_temp_new_i64();
27649 TCGv_i64 t1
= tcg_temp_new_i64();
27652 eval_zero_or_big
= 0x0101010101010101ULL
;
27653 eval_big
= 0x8080808080808080ULL
;
27656 eval_zero_or_big
= 0x0001000100010001ULL
;
27657 eval_big
= 0x8000800080008000ULL
;
27660 eval_zero_or_big
= 0x0000000100000001ULL
;
27661 eval_big
= 0x8000000080000000ULL
;
27664 eval_zero_or_big
= 0x0000000000000001ULL
;
27665 eval_big
= 0x8000000000000000ULL
;
27668 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
27669 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
27670 tcg_gen_andi_i64(t0
, t0
, eval_big
);
27671 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
27672 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
27673 tcg_gen_andi_i64(t1
, t1
, eval_big
);
27674 tcg_gen_or_i64(t0
, t0
, t1
);
27675 /* if all bits are zero then all elements are not zero */
27676 /* if some bit is non-zero then some element is zero */
27677 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
27678 tcg_gen_trunc_i64_tl(tresult
, t0
);
27679 tcg_temp_free_i64(t0
);
27680 tcg_temp_free_i64(t1
);
27683 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
27685 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27686 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27687 int64_t s16
= (int16_t)ctx
->opcode
;
27689 check_msa_access(ctx
);
27691 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
27692 generate_exception_end(ctx
, EXCP_RI
);
27699 TCGv_i64 t0
= tcg_temp_new_i64();
27700 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
27701 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
27702 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
27703 tcg_gen_trunc_i64_tl(bcond
, t0
);
27704 tcg_temp_free_i64(t0
);
27711 gen_check_zero_element(bcond
, df
, wt
);
27717 gen_check_zero_element(bcond
, df
, wt
);
27718 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
27722 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
27724 ctx
->hflags
|= MIPS_HFLAG_BC
;
27725 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
27728 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
27730 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
27731 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
27732 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27733 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27735 TCGv_i32 twd
= tcg_const_i32(wd
);
27736 TCGv_i32 tws
= tcg_const_i32(ws
);
27737 TCGv_i32 ti8
= tcg_const_i32(i8
);
27739 switch (MASK_MSA_I8(ctx
->opcode
)) {
27741 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
27744 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
27747 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
27750 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
27753 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
27756 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
27759 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
27765 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
27766 if (df
== DF_DOUBLE
) {
27767 generate_exception_end(ctx
, EXCP_RI
);
27769 TCGv_i32 tdf
= tcg_const_i32(df
);
27770 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
27771 tcg_temp_free_i32(tdf
);
27776 MIPS_INVAL("MSA instruction");
27777 generate_exception_end(ctx
, EXCP_RI
);
27781 tcg_temp_free_i32(twd
);
27782 tcg_temp_free_i32(tws
);
27783 tcg_temp_free_i32(ti8
);
27786 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
27788 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27789 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27790 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
27791 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
27792 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27793 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27795 TCGv_i32 tdf
= tcg_const_i32(df
);
27796 TCGv_i32 twd
= tcg_const_i32(wd
);
27797 TCGv_i32 tws
= tcg_const_i32(ws
);
27798 TCGv_i32 timm
= tcg_temp_new_i32();
27799 tcg_gen_movi_i32(timm
, u5
);
27801 switch (MASK_MSA_I5(ctx
->opcode
)) {
27803 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27806 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27808 case OPC_MAXI_S_df
:
27809 tcg_gen_movi_i32(timm
, s5
);
27810 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27812 case OPC_MAXI_U_df
:
27813 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27815 case OPC_MINI_S_df
:
27816 tcg_gen_movi_i32(timm
, s5
);
27817 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27819 case OPC_MINI_U_df
:
27820 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27823 tcg_gen_movi_i32(timm
, s5
);
27824 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
27826 case OPC_CLTI_S_df
:
27827 tcg_gen_movi_i32(timm
, s5
);
27828 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27830 case OPC_CLTI_U_df
:
27831 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27833 case OPC_CLEI_S_df
:
27834 tcg_gen_movi_i32(timm
, s5
);
27835 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
27837 case OPC_CLEI_U_df
:
27838 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
27842 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
27843 tcg_gen_movi_i32(timm
, s10
);
27844 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
27848 MIPS_INVAL("MSA instruction");
27849 generate_exception_end(ctx
, EXCP_RI
);
27853 tcg_temp_free_i32(tdf
);
27854 tcg_temp_free_i32(twd
);
27855 tcg_temp_free_i32(tws
);
27856 tcg_temp_free_i32(timm
);
27859 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
27861 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27862 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
27863 uint32_t df
= 0, m
= 0;
27864 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27865 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27872 if ((dfm
& 0x40) == 0x00) {
27875 } else if ((dfm
& 0x60) == 0x40) {
27878 } else if ((dfm
& 0x70) == 0x60) {
27881 } else if ((dfm
& 0x78) == 0x70) {
27885 generate_exception_end(ctx
, EXCP_RI
);
27889 tdf
= tcg_const_i32(df
);
27890 tm
= tcg_const_i32(m
);
27891 twd
= tcg_const_i32(wd
);
27892 tws
= tcg_const_i32(ws
);
27894 switch (MASK_MSA_BIT(ctx
->opcode
)) {
27896 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27899 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
27902 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27905 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27908 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
27911 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
27913 case OPC_BINSLI_df
:
27914 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
27916 case OPC_BINSRI_df
:
27917 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27920 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
27923 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
27926 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
27929 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
27932 MIPS_INVAL("MSA instruction");
27933 generate_exception_end(ctx
, EXCP_RI
);
27937 tcg_temp_free_i32(tdf
);
27938 tcg_temp_free_i32(tm
);
27939 tcg_temp_free_i32(twd
);
27940 tcg_temp_free_i32(tws
);
27943 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
27945 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
27946 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27947 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27948 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
27949 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
27951 TCGv_i32 tdf
= tcg_const_i32(df
);
27952 TCGv_i32 twd
= tcg_const_i32(wd
);
27953 TCGv_i32 tws
= tcg_const_i32(ws
);
27954 TCGv_i32 twt
= tcg_const_i32(wt
);
27956 switch (MASK_MSA_3R(ctx
->opcode
)) {
27958 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
27961 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27964 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
27967 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27969 case OPC_SUBS_S_df
:
27970 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
27973 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27976 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
27979 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
27982 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
27985 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27987 case OPC_ADDS_A_df
:
27988 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
27990 case OPC_SUBS_U_df
:
27991 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
27994 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
27997 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
28000 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
28003 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28006 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28009 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28011 case OPC_ADDS_S_df
:
28012 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28014 case OPC_SUBSUS_U_df
:
28015 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28018 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28021 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28024 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28027 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28030 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28033 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28035 case OPC_ADDS_U_df
:
28036 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28038 case OPC_SUBSUU_S_df
:
28039 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28042 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28045 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
28048 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28051 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28054 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28056 case OPC_ASUB_S_df
:
28057 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28060 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28063 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28066 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
28069 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28072 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28075 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28077 case OPC_ASUB_U_df
:
28078 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28081 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28084 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28087 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28090 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28092 case OPC_AVER_S_df
:
28093 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28096 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28099 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28102 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28105 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28107 case OPC_AVER_U_df
:
28108 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28111 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28114 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28117 case OPC_DOTP_S_df
:
28118 case OPC_DOTP_U_df
:
28119 case OPC_DPADD_S_df
:
28120 case OPC_DPADD_U_df
:
28121 case OPC_DPSUB_S_df
:
28122 case OPC_HADD_S_df
:
28123 case OPC_DPSUB_U_df
:
28124 case OPC_HADD_U_df
:
28125 case OPC_HSUB_S_df
:
28126 case OPC_HSUB_U_df
:
28127 if (df
== DF_BYTE
) {
28128 generate_exception_end(ctx
, EXCP_RI
);
28131 switch (MASK_MSA_3R(ctx
->opcode
)) {
28132 case OPC_DOTP_S_df
:
28133 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28135 case OPC_DOTP_U_df
:
28136 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28138 case OPC_DPADD_S_df
:
28139 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28141 case OPC_DPADD_U_df
:
28142 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28144 case OPC_DPSUB_S_df
:
28145 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28147 case OPC_HADD_S_df
:
28148 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28150 case OPC_DPSUB_U_df
:
28151 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28153 case OPC_HADD_U_df
:
28154 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28156 case OPC_HSUB_S_df
:
28157 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28159 case OPC_HSUB_U_df
:
28160 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28165 MIPS_INVAL("MSA instruction");
28166 generate_exception_end(ctx
, EXCP_RI
);
28169 tcg_temp_free_i32(twd
);
28170 tcg_temp_free_i32(tws
);
28171 tcg_temp_free_i32(twt
);
28172 tcg_temp_free_i32(tdf
);
28175 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
28177 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
28178 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
28179 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
28180 TCGv telm
= tcg_temp_new();
28181 TCGv_i32 tsr
= tcg_const_i32(source
);
28182 TCGv_i32 tdt
= tcg_const_i32(dest
);
28184 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
28186 gen_load_gpr(telm
, source
);
28187 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
28190 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
28191 gen_store_gpr(telm
, dest
);
28194 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
28197 MIPS_INVAL("MSA instruction");
28198 generate_exception_end(ctx
, EXCP_RI
);
28202 tcg_temp_free(telm
);
28203 tcg_temp_free_i32(tdt
);
28204 tcg_temp_free_i32(tsr
);
28207 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
28210 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28211 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28212 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28214 TCGv_i32 tws
= tcg_const_i32(ws
);
28215 TCGv_i32 twd
= tcg_const_i32(wd
);
28216 TCGv_i32 tn
= tcg_const_i32(n
);
28217 TCGv_i32 tdf
= tcg_const_i32(df
);
28219 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28221 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
28223 case OPC_SPLATI_df
:
28224 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
28227 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
28229 case OPC_COPY_S_df
:
28230 case OPC_COPY_U_df
:
28231 case OPC_INSERT_df
:
28232 #if !defined(TARGET_MIPS64)
28233 /* Double format valid only for MIPS64 */
28234 if (df
== DF_DOUBLE
) {
28235 generate_exception_end(ctx
, EXCP_RI
);
28239 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28240 case OPC_COPY_S_df
:
28241 if (likely(wd
!= 0)) {
28242 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
28245 case OPC_COPY_U_df
:
28246 if (likely(wd
!= 0)) {
28247 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
28250 case OPC_INSERT_df
:
28251 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
28256 MIPS_INVAL("MSA instruction");
28257 generate_exception_end(ctx
, EXCP_RI
);
28259 tcg_temp_free_i32(twd
);
28260 tcg_temp_free_i32(tws
);
28261 tcg_temp_free_i32(tn
);
28262 tcg_temp_free_i32(tdf
);
28265 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
28267 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
28268 uint32_t df
= 0, n
= 0;
28270 if ((dfn
& 0x30) == 0x00) {
28273 } else if ((dfn
& 0x38) == 0x20) {
28276 } else if ((dfn
& 0x3c) == 0x30) {
28279 } else if ((dfn
& 0x3e) == 0x38) {
28282 } else if (dfn
== 0x3E) {
28283 /* CTCMSA, CFCMSA, MOVE.V */
28284 gen_msa_elm_3e(env
, ctx
);
28287 generate_exception_end(ctx
, EXCP_RI
);
28291 gen_msa_elm_df(env
, ctx
, df
, n
);
28294 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28296 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28297 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
28298 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28299 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28300 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28302 TCGv_i32 twd
= tcg_const_i32(wd
);
28303 TCGv_i32 tws
= tcg_const_i32(ws
);
28304 TCGv_i32 twt
= tcg_const_i32(wt
);
28305 TCGv_i32 tdf
= tcg_temp_new_i32();
28307 /* adjust df value for floating-point instruction */
28308 tcg_gen_movi_i32(tdf
, df
+ 2);
28310 switch (MASK_MSA_3RF(ctx
->opcode
)) {
28312 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28315 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28318 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28321 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28324 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28327 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28330 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
28333 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28336 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28339 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28342 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28345 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28348 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28351 tcg_gen_movi_i32(tdf
, df
+ 1);
28352 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28355 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28358 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28360 case OPC_MADD_Q_df
:
28361 tcg_gen_movi_i32(tdf
, df
+ 1);
28362 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28365 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28367 case OPC_MSUB_Q_df
:
28368 tcg_gen_movi_i32(tdf
, df
+ 1);
28369 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28372 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28375 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
28378 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28381 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
28384 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28387 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28390 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28393 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28396 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28399 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28402 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28405 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28408 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
28410 case OPC_MULR_Q_df
:
28411 tcg_gen_movi_i32(tdf
, df
+ 1);
28412 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28415 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28417 case OPC_FMIN_A_df
:
28418 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28420 case OPC_MADDR_Q_df
:
28421 tcg_gen_movi_i32(tdf
, df
+ 1);
28422 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28425 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28428 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
28430 case OPC_MSUBR_Q_df
:
28431 tcg_gen_movi_i32(tdf
, df
+ 1);
28432 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28435 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28437 case OPC_FMAX_A_df
:
28438 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28441 MIPS_INVAL("MSA instruction");
28442 generate_exception_end(ctx
, EXCP_RI
);
28446 tcg_temp_free_i32(twd
);
28447 tcg_temp_free_i32(tws
);
28448 tcg_temp_free_i32(twt
);
28449 tcg_temp_free_i32(tdf
);
28452 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
28454 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28455 (op & (0x7 << 18)))
28456 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28457 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28458 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28459 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
28460 TCGv_i32 twd
= tcg_const_i32(wd
);
28461 TCGv_i32 tws
= tcg_const_i32(ws
);
28462 TCGv_i32 twt
= tcg_const_i32(wt
);
28463 TCGv_i32 tdf
= tcg_const_i32(df
);
28465 switch (MASK_MSA_2R(ctx
->opcode
)) {
28467 #if !defined(TARGET_MIPS64)
28468 /* Double format valid only for MIPS64 */
28469 if (df
== DF_DOUBLE
) {
28470 generate_exception_end(ctx
, EXCP_RI
);
28474 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
28477 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
28480 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
28483 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
28486 MIPS_INVAL("MSA instruction");
28487 generate_exception_end(ctx
, EXCP_RI
);
28491 tcg_temp_free_i32(twd
);
28492 tcg_temp_free_i32(tws
);
28493 tcg_temp_free_i32(twt
);
28494 tcg_temp_free_i32(tdf
);
28497 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28499 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28500 (op & (0xf << 17)))
28501 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28502 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28503 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28504 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
28505 TCGv_i32 twd
= tcg_const_i32(wd
);
28506 TCGv_i32 tws
= tcg_const_i32(ws
);
28507 TCGv_i32 twt
= tcg_const_i32(wt
);
28508 /* adjust df value for floating-point instruction */
28509 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
28511 switch (MASK_MSA_2RF(ctx
->opcode
)) {
28512 case OPC_FCLASS_df
:
28513 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
28515 case OPC_FTRUNC_S_df
:
28516 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
28518 case OPC_FTRUNC_U_df
:
28519 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
28522 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
28524 case OPC_FRSQRT_df
:
28525 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
28528 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
28531 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
28534 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
28536 case OPC_FEXUPL_df
:
28537 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
28539 case OPC_FEXUPR_df
:
28540 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
28543 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
28546 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
28548 case OPC_FTINT_S_df
:
28549 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
28551 case OPC_FTINT_U_df
:
28552 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
28554 case OPC_FFINT_S_df
:
28555 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
28557 case OPC_FFINT_U_df
:
28558 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
28562 tcg_temp_free_i32(twd
);
28563 tcg_temp_free_i32(tws
);
28564 tcg_temp_free_i32(twt
);
28565 tcg_temp_free_i32(tdf
);
28568 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
28570 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
28571 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28572 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28573 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28574 TCGv_i32 twd
= tcg_const_i32(wd
);
28575 TCGv_i32 tws
= tcg_const_i32(ws
);
28576 TCGv_i32 twt
= tcg_const_i32(wt
);
28578 switch (MASK_MSA_VEC(ctx
->opcode
)) {
28580 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
28583 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
28586 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
28589 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
28592 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
28595 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
28598 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
28601 MIPS_INVAL("MSA instruction");
28602 generate_exception_end(ctx
, EXCP_RI
);
28606 tcg_temp_free_i32(twd
);
28607 tcg_temp_free_i32(tws
);
28608 tcg_temp_free_i32(twt
);
28611 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
28613 switch (MASK_MSA_VEC(ctx
->opcode
)) {
28621 gen_msa_vec_v(env
, ctx
);
28624 gen_msa_2r(env
, ctx
);
28627 gen_msa_2rf(env
, ctx
);
28630 MIPS_INVAL("MSA instruction");
28631 generate_exception_end(ctx
, EXCP_RI
);
28636 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
28638 uint32_t opcode
= ctx
->opcode
;
28639 check_insn(ctx
, ASE_MSA
);
28640 check_msa_access(ctx
);
28642 switch (MASK_MSA_MINOR(opcode
)) {
28643 case OPC_MSA_I8_00
:
28644 case OPC_MSA_I8_01
:
28645 case OPC_MSA_I8_02
:
28646 gen_msa_i8(env
, ctx
);
28648 case OPC_MSA_I5_06
:
28649 case OPC_MSA_I5_07
:
28650 gen_msa_i5(env
, ctx
);
28652 case OPC_MSA_BIT_09
:
28653 case OPC_MSA_BIT_0A
:
28654 gen_msa_bit(env
, ctx
);
28656 case OPC_MSA_3R_0D
:
28657 case OPC_MSA_3R_0E
:
28658 case OPC_MSA_3R_0F
:
28659 case OPC_MSA_3R_10
:
28660 case OPC_MSA_3R_11
:
28661 case OPC_MSA_3R_12
:
28662 case OPC_MSA_3R_13
:
28663 case OPC_MSA_3R_14
:
28664 case OPC_MSA_3R_15
:
28665 gen_msa_3r(env
, ctx
);
28668 gen_msa_elm(env
, ctx
);
28670 case OPC_MSA_3RF_1A
:
28671 case OPC_MSA_3RF_1B
:
28672 case OPC_MSA_3RF_1C
:
28673 gen_msa_3rf(env
, ctx
);
28676 gen_msa_vec(env
, ctx
);
28687 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
28688 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
28689 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28690 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
28692 TCGv_i32 twd
= tcg_const_i32(wd
);
28693 TCGv taddr
= tcg_temp_new();
28694 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
28696 switch (MASK_MSA_MINOR(opcode
)) {
28698 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
28701 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
28704 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
28707 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
28710 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
28713 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
28716 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
28719 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
28723 tcg_temp_free_i32(twd
);
28724 tcg_temp_free(taddr
);
28728 MIPS_INVAL("MSA instruction");
28729 generate_exception_end(ctx
, EXCP_RI
);
28735 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
28738 int rs
, rt
, rd
, sa
;
28742 /* make sure instructions are on a word boundary */
28743 if (ctx
->base
.pc_next
& 0x3) {
28744 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
28745 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
28749 /* Handle blikely not taken case */
28750 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
28751 TCGLabel
*l1
= gen_new_label();
28753 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
28754 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
28755 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
28759 op
= MASK_OP_MAJOR(ctx
->opcode
);
28760 rs
= (ctx
->opcode
>> 21) & 0x1f;
28761 rt
= (ctx
->opcode
>> 16) & 0x1f;
28762 rd
= (ctx
->opcode
>> 11) & 0x1f;
28763 sa
= (ctx
->opcode
>> 6) & 0x1f;
28764 imm
= (int16_t)ctx
->opcode
;
28767 decode_opc_special(env
, ctx
);
28770 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
28771 decode_mmi(env
, ctx
);
28772 #if !defined(TARGET_MIPS64)
28773 } else if (ctx
->insn_flags
& ASE_MXU
) {
28774 decode_opc_mxu(env
, ctx
);
28777 decode_opc_special2_legacy(env
, ctx
);
28781 if (ctx
->insn_flags
& INSN_R5900
) {
28782 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
28784 decode_opc_special3(env
, ctx
);
28788 op1
= MASK_REGIMM(ctx
->opcode
);
28790 case OPC_BLTZL
: /* REGIMM branches */
28794 check_insn(ctx
, ISA_MIPS2
);
28795 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28799 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
28803 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28805 /* OPC_NAL, OPC_BAL */
28806 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
28808 generate_exception_end(ctx
, EXCP_RI
);
28811 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
28814 case OPC_TGEI
: /* REGIMM traps */
28821 check_insn(ctx
, ISA_MIPS2
);
28822 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
28823 gen_trap(ctx
, op1
, rs
, -1, imm
);
28826 check_insn(ctx
, ISA_MIPS32R6
);
28827 generate_exception_end(ctx
, EXCP_RI
);
28830 check_insn(ctx
, ISA_MIPS32R2
);
28831 /* Break the TB to be able to sync copied instructions
28833 ctx
->base
.is_jmp
= DISAS_STOP
;
28835 case OPC_BPOSGE32
: /* MIPS DSP branch */
28836 #if defined(TARGET_MIPS64)
28840 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
28842 #if defined(TARGET_MIPS64)
28844 check_insn(ctx
, ISA_MIPS32R6
);
28845 check_mips_64(ctx
);
28847 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
28851 check_insn(ctx
, ISA_MIPS32R6
);
28852 check_mips_64(ctx
);
28854 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
28858 default: /* Invalid */
28859 MIPS_INVAL("regimm");
28860 generate_exception_end(ctx
, EXCP_RI
);
28865 check_cp0_enabled(ctx
);
28866 op1
= MASK_CP0(ctx
->opcode
);
28874 #if defined(TARGET_MIPS64)
28878 #ifndef CONFIG_USER_ONLY
28879 gen_cp0(env
, ctx
, op1
, rt
, rd
);
28880 #endif /* !CONFIG_USER_ONLY */
28898 #ifndef CONFIG_USER_ONLY
28899 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
28900 #endif /* !CONFIG_USER_ONLY */
28903 #ifndef CONFIG_USER_ONLY
28906 TCGv t0
= tcg_temp_new();
28908 op2
= MASK_MFMC0(ctx
->opcode
);
28912 gen_helper_dmt(t0
);
28913 gen_store_gpr(t0
, rt
);
28917 gen_helper_emt(t0
);
28918 gen_store_gpr(t0
, rt
);
28922 gen_helper_dvpe(t0
, cpu_env
);
28923 gen_store_gpr(t0
, rt
);
28927 gen_helper_evpe(t0
, cpu_env
);
28928 gen_store_gpr(t0
, rt
);
28931 check_insn(ctx
, ISA_MIPS32R6
);
28933 gen_helper_dvp(t0
, cpu_env
);
28934 gen_store_gpr(t0
, rt
);
28938 check_insn(ctx
, ISA_MIPS32R6
);
28940 gen_helper_evp(t0
, cpu_env
);
28941 gen_store_gpr(t0
, rt
);
28945 check_insn(ctx
, ISA_MIPS32R2
);
28946 save_cpu_state(ctx
, 1);
28947 gen_helper_di(t0
, cpu_env
);
28948 gen_store_gpr(t0
, rt
);
28949 /* Stop translation as we may have switched
28950 the execution mode. */
28951 ctx
->base
.is_jmp
= DISAS_STOP
;
28954 check_insn(ctx
, ISA_MIPS32R2
);
28955 save_cpu_state(ctx
, 1);
28956 gen_helper_ei(t0
, cpu_env
);
28957 gen_store_gpr(t0
, rt
);
28958 /* DISAS_STOP isn't sufficient, we need to ensure we break
28959 out of translated code to check for pending interrupts */
28960 gen_save_pc(ctx
->base
.pc_next
+ 4);
28961 ctx
->base
.is_jmp
= DISAS_EXIT
;
28963 default: /* Invalid */
28964 MIPS_INVAL("mfmc0");
28965 generate_exception_end(ctx
, EXCP_RI
);
28970 #endif /* !CONFIG_USER_ONLY */
28973 check_insn(ctx
, ISA_MIPS32R2
);
28974 gen_load_srsgpr(rt
, rd
);
28977 check_insn(ctx
, ISA_MIPS32R2
);
28978 gen_store_srsgpr(rt
, rd
);
28982 generate_exception_end(ctx
, EXCP_RI
);
28986 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
28987 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28988 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
28989 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
28992 /* Arithmetic with immediate opcode */
28993 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28997 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
28999 case OPC_SLTI
: /* Set on less than with immediate opcode */
29001 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
29003 case OPC_ANDI
: /* Arithmetic with immediate opcode */
29004 case OPC_LUI
: /* OPC_AUI */
29007 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
29009 case OPC_J
: /* Jump */
29011 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29012 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29015 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
29016 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29018 generate_exception_end(ctx
, EXCP_RI
);
29021 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
29022 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29025 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29028 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
29029 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29031 generate_exception_end(ctx
, EXCP_RI
);
29034 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
29035 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29038 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29041 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
29044 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29046 check_insn(ctx
, ISA_MIPS32R6
);
29047 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
29048 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29051 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
29054 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29056 check_insn(ctx
, ISA_MIPS32R6
);
29057 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
29058 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29063 check_insn(ctx
, ISA_MIPS2
);
29064 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29068 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29070 case OPC_LL
: /* Load and stores */
29071 check_insn(ctx
, ISA_MIPS2
);
29072 if (ctx
->insn_flags
& INSN_R5900
) {
29073 check_insn_opc_user_only(ctx
, INSN_R5900
);
29078 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29086 gen_ld(ctx
, op
, rt
, rs
, imm
);
29090 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29095 gen_st(ctx
, op
, rt
, rs
, imm
);
29098 check_insn(ctx
, ISA_MIPS2
);
29099 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29100 if (ctx
->insn_flags
& INSN_R5900
) {
29101 check_insn_opc_user_only(ctx
, INSN_R5900
);
29103 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
29106 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29107 check_cp0_enabled(ctx
);
29108 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
29109 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
29110 gen_cache_operation(ctx
, rt
, rs
, imm
);
29112 /* Treat as NOP. */
29115 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29116 if (ctx
->insn_flags
& INSN_R5900
) {
29117 /* Treat as NOP. */
29119 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
29120 /* Treat as NOP. */
29124 /* Floating point (COP1). */
29129 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
29133 op1
= MASK_CP1(ctx
->opcode
);
29138 check_cp1_enabled(ctx
);
29139 check_insn(ctx
, ISA_MIPS32R2
);
29145 check_cp1_enabled(ctx
);
29146 gen_cp1(ctx
, op1
, rt
, rd
);
29148 #if defined(TARGET_MIPS64)
29151 check_cp1_enabled(ctx
);
29152 check_insn(ctx
, ISA_MIPS3
);
29153 check_mips_64(ctx
);
29154 gen_cp1(ctx
, op1
, rt
, rd
);
29157 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
29158 check_cp1_enabled(ctx
);
29159 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29161 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29166 check_insn(ctx
, ASE_MIPS3D
);
29167 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29168 (rt
>> 2) & 0x7, imm
<< 2);
29172 check_cp1_enabled(ctx
);
29173 check_insn(ctx
, ISA_MIPS32R6
);
29174 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29178 check_cp1_enabled(ctx
);
29179 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29181 check_insn(ctx
, ASE_MIPS3D
);
29184 check_cp1_enabled(ctx
);
29185 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29186 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29187 (rt
>> 2) & 0x7, imm
<< 2);
29194 check_cp1_enabled(ctx
);
29195 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29201 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
29202 check_cp1_enabled(ctx
);
29203 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29205 case R6_OPC_CMP_AF_S
:
29206 case R6_OPC_CMP_UN_S
:
29207 case R6_OPC_CMP_EQ_S
:
29208 case R6_OPC_CMP_UEQ_S
:
29209 case R6_OPC_CMP_LT_S
:
29210 case R6_OPC_CMP_ULT_S
:
29211 case R6_OPC_CMP_LE_S
:
29212 case R6_OPC_CMP_ULE_S
:
29213 case R6_OPC_CMP_SAF_S
:
29214 case R6_OPC_CMP_SUN_S
:
29215 case R6_OPC_CMP_SEQ_S
:
29216 case R6_OPC_CMP_SEUQ_S
:
29217 case R6_OPC_CMP_SLT_S
:
29218 case R6_OPC_CMP_SULT_S
:
29219 case R6_OPC_CMP_SLE_S
:
29220 case R6_OPC_CMP_SULE_S
:
29221 case R6_OPC_CMP_OR_S
:
29222 case R6_OPC_CMP_UNE_S
:
29223 case R6_OPC_CMP_NE_S
:
29224 case R6_OPC_CMP_SOR_S
:
29225 case R6_OPC_CMP_SUNE_S
:
29226 case R6_OPC_CMP_SNE_S
:
29227 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29229 case R6_OPC_CMP_AF_D
:
29230 case R6_OPC_CMP_UN_D
:
29231 case R6_OPC_CMP_EQ_D
:
29232 case R6_OPC_CMP_UEQ_D
:
29233 case R6_OPC_CMP_LT_D
:
29234 case R6_OPC_CMP_ULT_D
:
29235 case R6_OPC_CMP_LE_D
:
29236 case R6_OPC_CMP_ULE_D
:
29237 case R6_OPC_CMP_SAF_D
:
29238 case R6_OPC_CMP_SUN_D
:
29239 case R6_OPC_CMP_SEQ_D
:
29240 case R6_OPC_CMP_SEUQ_D
:
29241 case R6_OPC_CMP_SLT_D
:
29242 case R6_OPC_CMP_SULT_D
:
29243 case R6_OPC_CMP_SLE_D
:
29244 case R6_OPC_CMP_SULE_D
:
29245 case R6_OPC_CMP_OR_D
:
29246 case R6_OPC_CMP_UNE_D
:
29247 case R6_OPC_CMP_NE_D
:
29248 case R6_OPC_CMP_SOR_D
:
29249 case R6_OPC_CMP_SUNE_D
:
29250 case R6_OPC_CMP_SNE_D
:
29251 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29254 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
29255 rt
, rd
, sa
, (imm
>> 8) & 0x7);
29260 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29275 check_insn(ctx
, ASE_MSA
);
29276 gen_msa_branch(env
, ctx
, op1
);
29280 generate_exception_end(ctx
, EXCP_RI
);
29285 /* Compact branches [R6] and COP2 [non-R6] */
29286 case OPC_BC
: /* OPC_LWC2 */
29287 case OPC_BALC
: /* OPC_SWC2 */
29288 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29289 /* OPC_BC, OPC_BALC */
29290 gen_compute_compact_branch(ctx
, op
, 0, 0,
29291 sextract32(ctx
->opcode
<< 2, 0, 28));
29293 /* OPC_LWC2, OPC_SWC2 */
29294 /* COP2: Not implemented. */
29295 generate_exception_err(ctx
, EXCP_CpU
, 2);
29298 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
29299 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
29300 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29302 /* OPC_BEQZC, OPC_BNEZC */
29303 gen_compute_compact_branch(ctx
, op
, rs
, 0,
29304 sextract32(ctx
->opcode
<< 2, 0, 23));
29306 /* OPC_JIC, OPC_JIALC */
29307 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
29310 /* OPC_LWC2, OPC_SWC2 */
29311 /* COP2: Not implemented. */
29312 generate_exception_err(ctx
, EXCP_CpU
, 2);
29316 check_insn(ctx
, INSN_LOONGSON2F
);
29317 /* Note that these instructions use different fields. */
29318 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
29322 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29323 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
29324 check_cp1_enabled(ctx
);
29325 op1
= MASK_CP3(ctx
->opcode
);
29329 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29335 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29336 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
29339 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29340 /* Treat as NOP. */
29343 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29357 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29358 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
29362 generate_exception_end(ctx
, EXCP_RI
);
29366 generate_exception_err(ctx
, EXCP_CpU
, 1);
29370 #if defined(TARGET_MIPS64)
29371 /* MIPS64 opcodes */
29373 if (ctx
->insn_flags
& INSN_R5900
) {
29374 check_insn_opc_user_only(ctx
, INSN_R5900
);
29379 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29383 check_insn(ctx
, ISA_MIPS3
);
29384 check_mips_64(ctx
);
29385 gen_ld(ctx
, op
, rt
, rs
, imm
);
29389 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29392 check_insn(ctx
, ISA_MIPS3
);
29393 check_mips_64(ctx
);
29394 gen_st(ctx
, op
, rt
, rs
, imm
);
29397 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29398 check_insn(ctx
, ISA_MIPS3
);
29399 if (ctx
->insn_flags
& INSN_R5900
) {
29400 check_insn_opc_user_only(ctx
, INSN_R5900
);
29402 check_mips_64(ctx
);
29403 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
29405 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
29406 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29407 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
29408 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29411 check_insn(ctx
, ISA_MIPS3
);
29412 check_mips_64(ctx
);
29413 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29417 check_insn(ctx
, ISA_MIPS3
);
29418 check_mips_64(ctx
);
29419 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29422 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
29423 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29424 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29426 MIPS_INVAL("major opcode");
29427 generate_exception_end(ctx
, EXCP_RI
);
29431 case OPC_DAUI
: /* OPC_JALX */
29432 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29433 #if defined(TARGET_MIPS64)
29435 check_mips_64(ctx
);
29437 generate_exception(ctx
, EXCP_RI
);
29438 } else if (rt
!= 0) {
29439 TCGv t0
= tcg_temp_new();
29440 gen_load_gpr(t0
, rs
);
29441 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
29445 generate_exception_end(ctx
, EXCP_RI
);
29446 MIPS_INVAL("major opcode");
29450 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
29451 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29452 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29455 case OPC_MSA
: /* OPC_MDMX */
29456 if (ctx
->insn_flags
& INSN_R5900
) {
29457 gen_mmi_lq(env
, ctx
); /* MMI_OPC_LQ */
29459 /* MDMX: Not implemented. */
29464 check_insn(ctx
, ISA_MIPS32R6
);
29465 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
29467 default: /* Invalid */
29468 MIPS_INVAL("major opcode");
29469 generate_exception_end(ctx
, EXCP_RI
);
29474 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
29476 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29477 CPUMIPSState
*env
= cs
->env_ptr
;
29479 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
29480 ctx
->saved_pc
= -1;
29481 ctx
->insn_flags
= env
->insn_flags
;
29482 ctx
->CP0_Config1
= env
->CP0_Config1
;
29483 ctx
->CP0_Config2
= env
->CP0_Config2
;
29484 ctx
->CP0_Config3
= env
->CP0_Config3
;
29485 ctx
->CP0_Config5
= env
->CP0_Config5
;
29487 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
29488 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
29489 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
29490 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
29491 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
29492 ctx
->PAMask
= env
->PAMask
;
29493 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
29494 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
29495 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
29496 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
29497 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
29498 /* Restore delay slot state from the tb context. */
29499 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
29500 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
29501 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
29502 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
29503 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
29504 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
29505 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
29506 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
29507 restore_cpu_state(env
, ctx
);
29508 #ifdef CONFIG_USER_ONLY
29509 ctx
->mem_idx
= MIPS_HFLAG_UM
;
29511 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
29513 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
29514 MO_UNALN
: MO_ALIGN
;
29516 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
29520 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29524 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29526 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29528 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
29532 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
29533 const CPUBreakpoint
*bp
)
29535 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29537 save_cpu_state(ctx
, 1);
29538 ctx
->base
.is_jmp
= DISAS_NORETURN
;
29539 gen_helper_raise_exception_debug(cpu_env
);
29540 /* The address covered by the breakpoint must be included in
29541 [tb->pc, tb->pc + tb->size) in order to for it to be
29542 properly cleared -- thus we increment the PC here so that
29543 the logic setting tb->size below does the right thing. */
29544 ctx
->base
.pc_next
+= 4;
29548 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
29550 CPUMIPSState
*env
= cs
->env_ptr
;
29551 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29555 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
29556 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
29557 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29558 insn_bytes
= decode_nanomips_opc(env
, ctx
);
29559 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
29560 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
29562 decode_opc(env
, ctx
);
29563 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
29564 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29565 insn_bytes
= decode_micromips_opc(env
, ctx
);
29566 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
29567 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29568 insn_bytes
= decode_mips16_opc(env
, ctx
);
29570 generate_exception_end(ctx
, EXCP_RI
);
29571 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
29575 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
29576 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
29577 MIPS_HFLAG_FBNSLOT
))) {
29578 /* force to generate branch as there is neither delay nor
29582 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
29583 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
29584 /* Force to generate branch as microMIPS R6 doesn't restrict
29585 branches in the forbidden slot. */
29590 gen_branch(ctx
, insn_bytes
);
29592 ctx
->base
.pc_next
+= insn_bytes
;
29594 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
29597 /* Execute a branch and its delay slot as a single instruction.
29598 This is what GDB expects and is consistent with what the
29599 hardware does (e.g. if a delay slot instruction faults, the
29600 reported PC is the PC of the branch). */
29601 if (ctx
->base
.singlestep_enabled
&&
29602 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
29603 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
29605 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
29606 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
29610 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
29612 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29614 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
29615 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
29616 gen_helper_raise_exception_debug(cpu_env
);
29618 switch (ctx
->base
.is_jmp
) {
29620 gen_save_pc(ctx
->base
.pc_next
);
29621 tcg_gen_lookup_and_goto_ptr();
29624 case DISAS_TOO_MANY
:
29625 save_cpu_state(ctx
, 0);
29626 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
29629 tcg_gen_exit_tb(NULL
, 0);
29631 case DISAS_NORETURN
:
29634 g_assert_not_reached();
29639 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
29641 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
29642 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
29645 static const TranslatorOps mips_tr_ops
= {
29646 .init_disas_context
= mips_tr_init_disas_context
,
29647 .tb_start
= mips_tr_tb_start
,
29648 .insn_start
= mips_tr_insn_start
,
29649 .breakpoint_check
= mips_tr_breakpoint_check
,
29650 .translate_insn
= mips_tr_translate_insn
,
29651 .tb_stop
= mips_tr_tb_stop
,
29652 .disas_log
= mips_tr_disas_log
,
29655 void gen_intermediate_code(CPUState
*cs
, struct TranslationBlock
*tb
)
29659 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
);
29662 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
29666 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
29668 #define printfpr(fp) \
29671 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
29672 " fd:%13g fs:%13g psu: %13g\n", \
29673 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
29674 (double)(fp)->fd, \
29675 (double)(fp)->fs[FP_ENDIAN_IDX], \
29676 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
29679 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
29680 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
29681 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
29682 " fd:%13g fs:%13g psu:%13g\n", \
29683 tmp.w[FP_ENDIAN_IDX], tmp.d, \
29685 (double)tmp.fs[FP_ENDIAN_IDX], \
29686 (double)tmp.fs[!FP_ENDIAN_IDX]); \
29691 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
29692 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
29693 get_float_exception_flags(&env
->active_fpu
.fp_status
));
29694 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
29695 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
29696 printfpr(&env
->active_fpu
.fpr
[i
]);
29702 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
29705 MIPSCPU
*cpu
= MIPS_CPU(cs
);
29706 CPUMIPSState
*env
= &cpu
->env
;
29709 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
29710 " LO=0x" TARGET_FMT_lx
" ds %04x "
29711 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
29712 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
29713 env
->hflags
, env
->btarget
, env
->bcond
);
29714 for (i
= 0; i
< 32; i
++) {
29716 cpu_fprintf(f
, "GPR%02d:", i
);
29717 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
29719 cpu_fprintf(f
, "\n");
29722 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
29723 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
29724 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
29726 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
29727 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
29728 env
->CP0_Config2
, env
->CP0_Config3
);
29729 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
29730 env
->CP0_Config4
, env
->CP0_Config5
);
29731 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
29732 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
29736 void mips_tcg_init(void)
29741 for (i
= 1; i
< 32; i
++)
29742 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
29743 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
29746 for (i
= 0; i
< 32; i
++) {
29747 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
29749 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
29750 /* The scalar floating-point unit (FPU) registers are mapped on
29751 * the MSA vector registers. */
29752 fpu_f64
[i
] = msa_wr_d
[i
* 2];
29753 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
29754 msa_wr_d
[i
* 2 + 1] =
29755 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
29758 cpu_PC
= tcg_global_mem_new(cpu_env
,
29759 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
29760 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
29761 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
29762 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
29764 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
29765 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
29768 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
29769 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
29771 bcond
= tcg_global_mem_new(cpu_env
,
29772 offsetof(CPUMIPSState
, bcond
), "bcond");
29773 btarget
= tcg_global_mem_new(cpu_env
,
29774 offsetof(CPUMIPSState
, btarget
), "btarget");
29775 hflags
= tcg_global_mem_new_i32(cpu_env
,
29776 offsetof(CPUMIPSState
, hflags
), "hflags");
29778 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
29779 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
29781 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
29782 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
29784 #if !defined(TARGET_MIPS64)
29785 for (i
= 0; i
< NUMBER_OF_MXU_REGISTERS
- 1; i
++) {
29786 mxu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
29787 offsetof(CPUMIPSState
,
29788 active_tc
.mxu_gpr
[i
]),
29792 mxu_CR
= tcg_global_mem_new(cpu_env
,
29793 offsetof(CPUMIPSState
, active_tc
.mxu_cr
),
29794 mxuregnames
[NUMBER_OF_MXU_REGISTERS
- 1]);
29798 #include "translate_init.inc.c"
29800 void cpu_mips_realize_env(CPUMIPSState
*env
)
29802 env
->exception_base
= (int32_t)0xBFC00000;
29804 #ifndef CONFIG_USER_ONLY
29805 mmu_init(env
, env
->cpu_model
);
29807 fpu_init(env
, env
->cpu_model
);
29808 mvp_init(env
, env
->cpu_model
);
29811 bool cpu_supports_cps_smp(const char *cpu_type
)
29813 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
29814 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
29817 bool cpu_supports_isa(const char *cpu_type
, unsigned int isa
)
29819 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
29820 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
29823 void cpu_set_exception_base(int vp_index
, target_ulong address
)
29825 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
29826 vp
->env
.exception_base
= address
;
29829 void cpu_state_reset(CPUMIPSState
*env
)
29831 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
29832 CPUState
*cs
= CPU(cpu
);
29834 /* Reset registers to their default values */
29835 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
29836 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
29837 #ifdef TARGET_WORDS_BIGENDIAN
29838 env
->CP0_Config0
|= (1 << CP0C0_BE
);
29840 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
29841 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
29842 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
29843 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
29844 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
29845 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
29846 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
29847 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
29848 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
29849 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
29850 << env
->cpu_model
->CP0_LLAddr_shift
;
29851 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
29852 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
29853 env
->CCRes
= env
->cpu_model
->CCRes
;
29854 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
29855 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
29856 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
29857 env
->current_tc
= 0;
29858 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
29859 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
29860 #if defined(TARGET_MIPS64)
29861 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
29862 env
->SEGMask
|= 3ULL << 62;
29865 env
->PABITS
= env
->cpu_model
->PABITS
;
29866 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
29867 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
29868 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
29869 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
29870 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
29871 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
29872 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
29873 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
29874 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
29875 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
29876 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
29877 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
29878 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
29879 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
29880 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
29881 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
29882 env
->msair
= env
->cpu_model
->MSAIR
;
29883 env
->insn_flags
= env
->cpu_model
->insn_flags
;
29885 #if defined(CONFIG_USER_ONLY)
29886 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
29887 # ifdef TARGET_MIPS64
29888 /* Enable 64-bit register mode. */
29889 env
->CP0_Status
|= (1 << CP0St_PX
);
29891 # ifdef TARGET_ABI_MIPSN64
29892 /* Enable 64-bit address mode. */
29893 env
->CP0_Status
|= (1 << CP0St_UX
);
29895 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
29896 hardware registers. */
29897 env
->CP0_HWREna
|= 0x0000000F;
29898 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
29899 env
->CP0_Status
|= (1 << CP0St_CU1
);
29901 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
29902 env
->CP0_Status
|= (1 << CP0St_MX
);
29904 # if defined(TARGET_MIPS64)
29905 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
29906 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
29907 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
29908 env
->CP0_Status
|= (1 << CP0St_FR
);
29912 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
29913 /* If the exception was raised from a delay slot,
29914 come back to the jump. */
29915 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
29916 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
29918 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
29920 env
->active_tc
.PC
= env
->exception_base
;
29921 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
29922 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
29923 env
->CP0_Wired
= 0;
29924 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
29925 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
29926 if (mips_um_ksegs_enabled()) {
29927 env
->CP0_EBase
|= 0x40000000;
29929 env
->CP0_EBase
|= (int32_t)0x80000000;
29931 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
29932 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
29934 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
29936 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
29937 /* vectored interrupts not implemented, timer on int 7,
29938 no performance counters. */
29939 env
->CP0_IntCtl
= 0xe0000000;
29943 for (i
= 0; i
< 7; i
++) {
29944 env
->CP0_WatchLo
[i
] = 0;
29945 env
->CP0_WatchHi
[i
] = 0x80000000;
29947 env
->CP0_WatchLo
[7] = 0;
29948 env
->CP0_WatchHi
[7] = 0;
29950 /* Count register increments in debug mode, EJTAG version 1 */
29951 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
29953 cpu_mips_store_count(env
, 1);
29955 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
29958 /* Only TC0 on VPE 0 starts as active. */
29959 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
29960 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
29961 env
->tcs
[i
].CP0_TCHalt
= 1;
29963 env
->active_tc
.CP0_TCHalt
= 1;
29966 if (cs
->cpu_index
== 0) {
29967 /* VPE0 starts up enabled. */
29968 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
29969 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
29971 /* TC0 starts up unhalted. */
29973 env
->active_tc
.CP0_TCHalt
= 0;
29974 env
->tcs
[0].CP0_TCHalt
= 0;
29975 /* With thread 0 active. */
29976 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
29977 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
29982 * Configure default legacy segmentation control. We use this regardless of
29983 * whether segmentation control is presented to the guest.
29985 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
29986 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
29987 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
29988 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
29989 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
29990 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
29992 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
29993 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
29994 (3 << CP0SC_C
)) << 16;
29995 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
29996 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
29997 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
29998 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
29999 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30000 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
30001 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
30002 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
30004 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
30005 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
30006 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
30007 env
->CP0_Status
|= (1 << CP0St_FR
);
30010 if (env
->insn_flags
& ISA_MIPS32R6
) {
30012 env
->CP0_PWSize
= 0x40;
30018 env
->CP0_PWField
= 0x0C30C302;
30025 env
->CP0_PWField
= 0x02;
30028 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
30029 /* microMIPS on reset when Config3.ISA is 3 */
30030 env
->hflags
|= MIPS_HFLAG_M16
;
30034 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
30038 compute_hflags(env
);
30039 restore_fp_status(env
);
30040 restore_pamask(env
);
30041 cs
->exception_index
= EXCP_NONE
;
30043 if (semihosting_get_argc()) {
30044 /* UHI interface can be used to obtain argc and argv */
30045 env
->active_tc
.gpr
[4] = -1;
30049 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
30050 target_ulong
*data
)
30052 env
->active_tc
.PC
= data
[0];
30053 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
30054 env
->hflags
|= data
[1];
30055 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
30056 case MIPS_HFLAG_BR
:
30058 case MIPS_HFLAG_BC
:
30059 case MIPS_HFLAG_BL
:
30061 env
->btarget
= data
[2];