/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"
#include "hw/mips/cpudevs.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "hw/semihosting/semihost.h"
#include "target/mips/trace.h"
#include "trace-tcg.h"
#include "exec/translator.h"
#include "qemu/qemu-print.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes: extract the primary opcode field (bits 31..26). */
#define MASK_OP_MAJOR(op)   ((op) & (0x3F << 26))
/* Major opcode values (instruction word bits 31..26). */
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA      = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
/* PC-relative address computation / loads: sub-opcode masks. */
#define MASK_OPC_PCREL_TOP2BITS(op)  (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op)  (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
159 /* Instructions determined by bits 19 and 20 */
160 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
161 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
162 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
164 /* Instructions determined by bits 16 ... 20 */
165 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
166 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
169 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
/* MIPS special opcodes: major opcode plus function field (bits 5..0).
 * The whole expansion is parenthesized so the macro composes safely
 * inside larger expressions. */
#define MASK_SPECIAL(op)    (MASK_OP_MAJOR(op) | ((op) & 0x3F))
177 OPC_SLL
= 0x00 | OPC_SPECIAL
,
178 /* NOP is SLL r0, r0, 0 */
179 /* SSNOP is SLL r0, r0, 1 */
180 /* EHB is SLL r0, r0, 3 */
181 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
182 OPC_ROTR
= OPC_SRL
| (1 << 21),
183 OPC_SRA
= 0x03 | OPC_SPECIAL
,
184 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
185 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
186 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
187 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
188 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
189 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
190 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
191 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
192 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
193 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
194 OPC_DROTR
= OPC_DSRL
| (1 << 21),
195 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
196 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
197 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
198 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
199 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
200 /* Multiplication / division */
201 OPC_MULT
= 0x18 | OPC_SPECIAL
,
202 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
203 OPC_DIV
= 0x1A | OPC_SPECIAL
,
204 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
205 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
206 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
207 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
208 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
210 /* 2 registers arithmetic / logic */
211 OPC_ADD
= 0x20 | OPC_SPECIAL
,
212 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
213 OPC_SUB
= 0x22 | OPC_SPECIAL
,
214 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
215 OPC_AND
= 0x24 | OPC_SPECIAL
,
216 OPC_OR
= 0x25 | OPC_SPECIAL
,
217 OPC_XOR
= 0x26 | OPC_SPECIAL
,
218 OPC_NOR
= 0x27 | OPC_SPECIAL
,
219 OPC_SLT
= 0x2A | OPC_SPECIAL
,
220 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
221 OPC_DADD
= 0x2C | OPC_SPECIAL
,
222 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
223 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
224 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
226 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
227 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
229 OPC_TGE
= 0x30 | OPC_SPECIAL
,
230 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
231 OPC_TLT
= 0x32 | OPC_SPECIAL
,
232 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
233 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
234 OPC_TNE
= 0x36 | OPC_SPECIAL
,
235 /* HI / LO registers load & stores */
236 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
237 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
238 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
239 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
240 /* Conditional moves */
241 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
242 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
244 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
245 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
247 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
250 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
251 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
252 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
253 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
254 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
256 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
257 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
258 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
259 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
/*
 * R6 Multiply and Divide instructions have the same opcode
 * and function field as legacy OPC_MULT[U]/OPC_DIV[U];
 * they are distinguished by bits 6..16 as well.
 */
#define MASK_R6_MULDIV(op)   (MASK_SPECIAL(op) | ((op) & (0x7ff)))
267 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
268 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
269 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
270 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
271 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
272 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
273 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
274 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
276 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
277 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
278 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
279 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
280 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
281 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
282 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
283 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
285 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
286 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
287 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
288 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
289 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
291 OPC_LSA
= 0x05 | OPC_SPECIAL
,
292 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
295 /* Multiplication variants of the vr54xx. */
296 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
299 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
300 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
301 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
302 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
303 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
304 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
305 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
306 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
307 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
308 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
309 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
310 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
311 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
312 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
315 /* REGIMM (rt field) opcodes */
316 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
319 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
320 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
321 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
322 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
323 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
324 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
325 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
326 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
327 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
328 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
329 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
330 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
331 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
332 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
333 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
334 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
336 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
337 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
340 /* Special2 opcodes */
341 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
344 /* Multiply & xxx operations */
345 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
346 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
347 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
348 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
349 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
351 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
352 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
353 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
354 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
355 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
356 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
357 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
358 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
359 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
360 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
361 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
362 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
364 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
365 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
366 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
367 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
369 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
372 /* Special3 opcodes */
373 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
376 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
377 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
378 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
379 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
380 OPC_INS
= 0x04 | OPC_SPECIAL3
,
381 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
382 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
383 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
384 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
385 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
386 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
387 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
388 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
391 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
392 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
393 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
394 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
395 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
396 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
397 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
398 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
399 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
400 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
401 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
402 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
405 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
406 /* MIPS DSP Arithmetic */
407 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
408 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
409 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
410 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
411 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
412 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
413 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
414 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
415 /* MIPS DSP GPR-Based Shift Sub-class */
416 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
417 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
418 /* MIPS DSP Multiply Sub-class insns */
419 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
420 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
421 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
422 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
423 /* DSP Bit/Manipulation Sub-class */
424 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
425 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
426 /* MIPS DSP Append Sub-class */
427 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
428 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
429 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
430 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
431 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
434 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
435 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
436 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
437 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
438 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
439 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
440 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
441 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
442 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
443 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
444 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
445 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
446 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
447 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
448 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
449 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
452 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
453 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
454 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
455 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
456 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
457 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
461 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
464 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
465 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
466 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
467 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
468 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
469 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
470 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
471 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
475 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
478 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
479 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
480 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
481 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
482 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
483 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
487 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
488 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
491 /* MIPS DSP REGIMM opcodes */
493 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
494 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
497 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
500 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
501 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
502 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
503 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
506 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
508 /* MIPS DSP Arithmetic Sub-class */
509 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
510 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
511 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
516 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
517 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
522 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
523 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
525 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
526 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
527 /* MIPS DSP Multiply Sub-class insns */
528 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
529 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
530 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
533 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
536 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
537 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
539 /* MIPS DSP Arithmetic Sub-class */
540 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
541 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
542 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
551 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
552 /* MIPS DSP Multiply Sub-class insns */
553 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
554 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
555 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
556 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
559 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
561 /* MIPS DSP Arithmetic Sub-class */
562 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
563 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
564 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
574 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
575 /* DSP Bit/Manipulation Sub-class */
576 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
577 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
578 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
580 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
583 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
585 /* MIPS DSP Arithmetic Sub-class */
586 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
587 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
588 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
593 /* DSP Compare-Pick Sub-class */
594 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
595 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
596 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
608 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
611 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
613 /* MIPS DSP GPR-Based Shift Sub-class */
614 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
615 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
616 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
635 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
638 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
640 /* MIPS DSP Multiply Sub-class insns */
641 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
642 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
643 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
662 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
665 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
667 /* DSP Bit/Manipulation Sub-class */
668 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
671 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
673 /* MIPS DSP Append Sub-class */
674 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
675 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
676 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
679 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
681 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
682 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
683 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
684 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
693 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
694 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
695 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
696 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
697 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
698 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
701 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
703 /* MIPS DSP Arithmetic Sub-class */
704 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
705 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
706 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
720 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
721 /* DSP Bit/Manipulation Sub-class */
722 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
723 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
724 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
727 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
730 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
732 /* MIPS DSP Multiply Sub-class insns */
733 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
734 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
735 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
737 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
738 /* MIPS DSP Arithmetic Sub-class */
739 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
740 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
741 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
749 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
750 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
759 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
762 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
764 /* DSP Compare-Pick Sub-class */
765 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
766 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
767 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
784 /* MIPS DSP Arithmetic Sub-class */
785 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
786 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
787 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
792 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
795 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* DSP Append Sub-class */
798 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
799 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
800 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
801 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
804 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
806 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
807 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
808 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
809 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
827 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
830 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
832 /* DSP Bit/Manipulation Sub-class */
833 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
836 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
838 /* MIPS DSP Multiply Sub-class insns */
839 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
840 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
841 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
864 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
867 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
869 /* MIPS DSP GPR-Based Shift Sub-class */
870 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
871 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
872 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
895 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
898 /* Coprocessor 0 (rs field) */
899 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
902 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
903 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
904 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
905 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
906 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
907 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
908 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
909 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
910 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
911 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
912 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
913 OPC_C0
= (0x10 << 21) | OPC_CP0
,
914 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
915 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
916 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
917 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
918 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
919 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
920 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
921 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
922 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
923 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
924 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
925 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
926 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
927 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
928 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
932 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
935 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
936 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
937 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
938 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
939 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
940 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
941 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
942 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
945 /* Coprocessor 0 (with rs == C0) */
946 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
949 OPC_TLBR
= 0x01 | OPC_C0
,
950 OPC_TLBWI
= 0x02 | OPC_C0
,
951 OPC_TLBINV
= 0x03 | OPC_C0
,
952 OPC_TLBINVF
= 0x04 | OPC_C0
,
953 OPC_TLBWR
= 0x06 | OPC_C0
,
954 OPC_TLBP
= 0x08 | OPC_C0
,
955 OPC_RFE
= 0x10 | OPC_C0
,
956 OPC_ERET
= 0x18 | OPC_C0
,
957 OPC_DERET
= 0x1F | OPC_C0
,
958 OPC_WAIT
= 0x20 | OPC_C0
,
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)  (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16, /* single fp */
    FMT_D  = 17, /* double fp */
    FMT_E  = 18, /* extended fp */
    FMT_Q  = 19, /* quad fp */
    FMT_W  = 20, /* 32-bit fixed */
    FMT_L  = 21, /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};
978 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
979 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
980 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
981 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
982 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
983 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
984 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
985 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
986 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
987 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
988 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
989 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
990 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
991 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
992 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
993 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
994 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
995 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
996 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
997 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
998 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
999 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
1000 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1001 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1002 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1003 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1004 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1005 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1006 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1007 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
1010 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
1011 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
1014 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1015 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1016 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1017 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1021 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1022 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1026 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1027 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
1030 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1033 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1034 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1035 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1036 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1037 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1038 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1039 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1040 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1041 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1042 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1043 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1046 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1049 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1050 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1051 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1056 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1058 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1059 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1060 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1065 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1067 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1068 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1069 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1070 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1071 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1072 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1073 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1074 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1076 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1077 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1078 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1083 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1085 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1086 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1087 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1090 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1092 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1093 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1094 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1097 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1099 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1100 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1101 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1104 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1106 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1107 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1108 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1111 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1113 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1114 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1115 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1118 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1120 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1121 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1122 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1125 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1127 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1128 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1129 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1132 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1134 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1135 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1136 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1139 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1143 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1146 OPC_LWXC1
= 0x00 | OPC_CP3
,
1147 OPC_LDXC1
= 0x01 | OPC_CP3
,
1148 OPC_LUXC1
= 0x05 | OPC_CP3
,
1149 OPC_SWXC1
= 0x08 | OPC_CP3
,
1150 OPC_SDXC1
= 0x09 | OPC_CP3
,
1151 OPC_SUXC1
= 0x0D | OPC_CP3
,
1152 OPC_PREFX
= 0x0F | OPC_CP3
,
1153 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1154 OPC_MADD_S
= 0x20 | OPC_CP3
,
1155 OPC_MADD_D
= 0x21 | OPC_CP3
,
1156 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1157 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1158 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1159 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1160 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1161 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1162 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1163 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1164 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1165 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1169 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1171 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1172 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1173 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1174 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1175 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1176 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1177 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1178 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1179 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1180 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1181 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1182 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1183 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1184 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1185 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1186 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1187 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1188 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1189 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1190 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1191 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1193 /* MI10 instruction */
1194 OPC_LD_B
= (0x20) | OPC_MSA
,
1195 OPC_LD_H
= (0x21) | OPC_MSA
,
1196 OPC_LD_W
= (0x22) | OPC_MSA
,
1197 OPC_LD_D
= (0x23) | OPC_MSA
,
1198 OPC_ST_B
= (0x24) | OPC_MSA
,
1199 OPC_ST_H
= (0x25) | OPC_MSA
,
1200 OPC_ST_W
= (0x26) | OPC_MSA
,
1201 OPC_ST_D
= (0x27) | OPC_MSA
,
1205 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1206 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1207 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1208 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1209 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1210 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1211 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1212 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1213 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1214 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1215 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1216 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1217 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1219 /* I8 instruction */
1220 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1221 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1222 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1223 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1224 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1225 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1226 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1227 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1228 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1229 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1231 /* VEC/2R/2RF instruction */
1232 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1233 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1234 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1235 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1236 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1237 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1238 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1240 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1241 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1243 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1244 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1245 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1246 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1247 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1249 /* 2RF instruction df(bit 16) = _w, _d */
1250 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1251 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1252 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1253 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1254 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1255 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1256 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1257 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1258 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1259 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1260 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1261 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1262 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1263 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1264 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1265 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1267 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1268 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1269 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1270 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1271 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1272 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1273 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1274 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1275 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1276 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1277 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1278 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1279 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1280 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1281 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1282 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1283 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1284 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1285 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1286 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1287 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1288 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1289 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1290 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1291 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1292 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1293 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1294 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1295 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1296 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1297 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1298 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1299 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1300 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1301 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1302 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1303 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1304 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1305 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1306 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1307 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1308 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1309 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1310 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1311 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1312 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1313 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1314 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1315 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1316 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1317 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1318 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1319 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1320 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1321 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1322 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1323 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1324 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1325 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1326 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1327 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1328 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1329 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1330 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1332 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1333 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1334 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1335 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1336 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1337 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1338 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1339 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1341 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1343 /* 3RF instruction _df(bit 21) = _w, _d */
1344 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1345 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1346 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1347 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1348 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1349 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1350 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1351 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1352 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1353 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1354 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1355 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1356 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1357 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1358 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1359 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1360 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1361 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1362 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1363 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1364 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1365 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1366 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1367 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1368 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1369 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1370 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1371 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1372 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1373 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1374 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1375 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1376 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1377 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1378 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1379 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1380 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1381 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1382 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1383 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1384 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1386 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1387 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1388 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1389 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1390 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1391 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1392 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1393 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1394 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1395 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1396 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1397 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1398 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1404 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1405 * ============================================
1408 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
1409 * instruction set. It is designed to fit the needs of signal, graphical and
1410 * video processing applications. MXU instruction set is used in Xburst family
1411 * of microprocessors by Ingenic.
1413 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1414 * the control register.
1417 * The notation used in MXU assembler mnemonics
1418 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1420 * Register operands:
1422 * XRa, XRb, XRc, XRd - MXU registers
1423 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1425 * Non-register operands:
1427 * aptn1 - 1-bit accumulate add/subtract pattern
1428 * aptn2 - 2-bit accumulate add/subtract pattern
1429 * eptn2 - 2-bit execute add/subtract pattern
1430 * optn2 - 2-bit operand pattern
1431 * optn3 - 3-bit operand pattern
1432 * sft4 - 4-bit shift amount
1433 * strd2 - 2-bit stride amount
1437 * Level of parallelism: Operand size:
1438 * S - single operation at a time 32 - word
1439 * D - two operations in parallel 16 - half word
1440 * Q - four operations in parallel 8 - byte
1444 * ADD - Add or subtract
1445 * ADDC - Add with carry-in
1447 * ASUM - Sum together then accumulate (add or subtract)
1448 * ASUMC - Sum together then accumulate (add or subtract) with carry-in
1449 * AVG - Average between 2 operands
1450 * ABD - Absolute difference
1452 * AND - Logical bitwise 'and' operation
1454 * EXTR - Extract bits
1455 * I2M - Move from GPR register to MXU register
1456 * LDD - Load data from memory to XRF
1457 * LDI - Load data from memory to XRF (and increase the address base)
1458 * LUI - Load unsigned immediate
1460 * MULU - Unsigned multiply
1461 * MADD - 64-bit operand add 32x32 product
1462 * MSUB - 64-bit operand subtract 32x32 product
1463 * MAC - Multiply and accumulate (add or subtract)
1464 * MAD - Multiply and add or subtract
1465 * MAX - Maximum between 2 operands
1466 * MIN - Minimum between 2 operands
1467 * M2I - Move from MXU register to GPR register
1468 * MOVZ - Move if zero
1469 * MOVN - Move if non-zero
1470 * NOR - Logical bitwise 'nor' operation
1471 * OR - Logical bitwise 'or' operation
1472 * STD - Store data from XRF to memory
1473 * SDI - Store data from XRF to memory (and increase the address base)
1474 * SLT - Set of less than comparison
1475 * SAD - Sum of absolute differences
1476 * SLL - Logical shift left
1477 * SLR - Logical shift right
1478 * SAR - Arithmetic shift right
1481 * SCOP - Calculate x’s scope (-1, means x<0; 0, means x==0; 1, means x>0)
1482 * XOR - Logical bitwise 'exclusive or' operation
1486 * E - Expand results
1487 * F - Fixed point multiplication
1488 * L - Low part result
1489 * R - Doing rounding
1490 * V - Variable instead of immediate
1491 * W - Combine above L and V
1494 * The list of MXU instructions grouped by functionality
1495 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1497 * Load/Store instructions Multiplication instructions
1498 * ----------------------- ---------------------------
1500 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1501 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1502 * S32LDDV XRa, Rb, rc, strd2 S32MSUB XRa, XRd, Rs, Rt
1503 * S32STDV XRa, Rb, rc, strd2 S32MSUBU XRa, XRd, Rs, Rt
1504 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1505 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1506 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1507 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1508 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1509 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1510 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1511 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1512 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1513 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1514 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1515 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1516 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1517 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1518 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1519 * S16SDI XRa, Rb, s10, eptn2
1520 * S8LDD XRa, Rb, s8, eptn3
1521 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1522 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1523 * S8SDI XRa, Rb, s8, eptn3
1524 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1525 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1526 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1527 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1528 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1529 * S32CPS XRa, XRb, XRc
1530 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1531 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1532 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1533 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1534 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1535 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1536 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1537 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1538 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1539 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1540 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1541 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1542 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1543 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1544 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1545 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1546 * Q8SLT XRa, XRb, XRc
1547 * Q8SLTU XRa, XRb, XRc
1548 * Q8MOVZ XRa, XRb, XRc Shift instructions
1549 * Q8MOVN XRa, XRb, XRc ------------------
1551 * D32SLL XRa, XRb, XRc, XRd, sft4
1552 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1553 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1554 * D32SARL XRa, XRb, XRc, sft4
1555 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1556 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1557 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1558 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1559 * Q16SLL XRa, XRb, XRc, XRd, sft4
1560 * Q16SLR XRa, XRb, XRc, XRd, sft4
1561 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1562 * ------------------------- Q16SLLV XRa, XRb, Rb
1563 * Q16SLRV XRa, XRb, Rb
1564 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1565 * S32ALN XRa, XRb, XRc, Rb
1566 * S32ALNI XRa, XRb, XRc, s3
1567 * S32LUI XRa, s8, optn3 Move instructions
1568 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1569 * S32EXTRV XRa, XRb, Rs, Rt
1570 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1571 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1574 * The opcode organization of MXU instructions
1575 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1577 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1578 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1579 * other bits up to the instruction level is as follows:
1584 * ┌─ 000000 ─ OPC_MXU_S32MADD
1585 * ├─ 000001 ─ OPC_MXU_S32MADDU
1586 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1589 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1590 * │ ├─ 001 ─ OPC_MXU_S32MIN
1591 * │ ├─ 010 ─ OPC_MXU_D16MAX
1592 * │ ├─ 011 ─ OPC_MXU_D16MIN
1593 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1594 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1595 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1596 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1597 * ├─ 000100 ─ OPC_MXU_S32MSUB
1598 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1599 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1600 * │ ├─ 001 ─ OPC_MXU_D16SLT
1601 * │ ├─ 010 ─ OPC_MXU_D16AVG
1602 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1603 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1604 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1605 * │ └─ 111 ─ OPC_MXU_Q8ADD
1608 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1609 * │ ├─ 010 ─ OPC_MXU_D16CPS
1610 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1611 * │ └─ 110 ─ OPC_MXU_Q16SAT
1612 * ├─ 001000 ─ OPC_MXU_D16MUL
1614 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1615 * │ └─ 01 ─ OPC_MXU_D16MULE
1616 * ├─ 001010 ─ OPC_MXU_D16MAC
1617 * ├─ 001011 ─ OPC_MXU_D16MACF
1618 * ├─ 001100 ─ OPC_MXU_D16MADL
1619 * ├─ 001101 ─ OPC_MXU_S16MAD
1620 * ├─ 001110 ─ OPC_MXU_Q16ADD
1621 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1622 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1623 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1626 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1627 * │ └─ 1 ─ OPC_MXU_S32STDR
1630 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1631 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1634 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1635 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1638 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1639 * │ └─ 1 ─ OPC_MXU_S32LDIR
1642 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1643 * │ └─ 1 ─ OPC_MXU_S32SDIR
1646 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1647 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1650 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1651 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1652 * ├─ 011000 ─ OPC_MXU_D32ADD
1654 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1655 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1656 * │ └─ 10 ─ OPC_MXU_D32ASUM
1657 * ├─ 011010 ─ <not assigned>
1659 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1660 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1661 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1664 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1665 * │ ├─ 01 ─ OPC_MXU_D8SUM
1666 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1667 * ├─ 011110 ─ <not assigned>
1668 * ├─ 011111 ─ <not assigned>
1669 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1670 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1671 * ├─ 100010 ─ OPC_MXU_S8LDD
1672 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1673 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
1674 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 01 ─ OPC_MXU_S32MULU
1675 * │ ├─ 10 ─ OPC_MXU_S32EXTR
1676 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 11 ─ OPC_MXU_S32EXTRV
1679 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1680 * │ ├─ 001 ─ OPC_MXU_S32ALN
1681 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1682 * │ ├─ 011 ─ OPC_MXU_S32LUI
1683 * │ ├─ 100 ─ OPC_MXU_S32NOR
1684 * │ ├─ 101 ─ OPC_MXU_S32AND
1685 * │ ├─ 110 ─ OPC_MXU_S32OR
1686 * │ └─ 111 ─ OPC_MXU_S32XOR
1689 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1690 * │ ├─ 001 ─ OPC_MXU_LXH
1691 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1692 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1693 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1694 * ├─ 101100 ─ OPC_MXU_S16LDI
1695 * ├─ 101101 ─ OPC_MXU_S16SDI
1696 * ├─ 101110 ─ OPC_MXU_S32M2I
1697 * ├─ 101111 ─ OPC_MXU_S32I2M
1698 * ├─ 110000 ─ OPC_MXU_D32SLL
1699 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1700 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1701 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1702 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 010 ─ OPC_MXU_D32SARV
1703 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 011 ─ OPC_MXU_Q16SLLV
1704 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1705 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 101 ─ OPC_MXU_Q16SARV
1707 * ├─ 110111 ─ OPC_MXU_Q16SAR
1709 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1710 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1713 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1714 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1715 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1716 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1717 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1718 * │ └─ 101 ─ OPC_MXU_S32MOVN
1721 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1722 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1723 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1724 * ├─ 111100 ─ OPC_MXU_Q8MADL
1725 * ├─ 111101 ─ OPC_MXU_S32SFL
1726 * ├─ 111110 ─ OPC_MXU_Q8SAD
1727 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1732 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1733 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
1737 OPC_MXU_S32MADD
= 0x00,
1738 OPC_MXU_S32MADDU
= 0x01,
1739 OPC__MXU_MUL
= 0x02,
1740 OPC_MXU__POOL00
= 0x03,
1741 OPC_MXU_S32MSUB
= 0x04,
1742 OPC_MXU_S32MSUBU
= 0x05,
1743 OPC_MXU__POOL01
= 0x06,
1744 OPC_MXU__POOL02
= 0x07,
1745 OPC_MXU_D16MUL
= 0x08,
1746 OPC_MXU__POOL03
= 0x09,
1747 OPC_MXU_D16MAC
= 0x0A,
1748 OPC_MXU_D16MACF
= 0x0B,
1749 OPC_MXU_D16MADL
= 0x0C,
1750 OPC_MXU_S16MAD
= 0x0D,
1751 OPC_MXU_Q16ADD
= 0x0E,
1752 OPC_MXU_D16MACE
= 0x0F,
1753 OPC_MXU__POOL04
= 0x10,
1754 OPC_MXU__POOL05
= 0x11,
1755 OPC_MXU__POOL06
= 0x12,
1756 OPC_MXU__POOL07
= 0x13,
1757 OPC_MXU__POOL08
= 0x14,
1758 OPC_MXU__POOL09
= 0x15,
1759 OPC_MXU__POOL10
= 0x16,
1760 OPC_MXU__POOL11
= 0x17,
1761 OPC_MXU_D32ADD
= 0x18,
1762 OPC_MXU__POOL12
= 0x19,
1763 /* not assigned 0x1A */
1764 OPC_MXU__POOL13
= 0x1B,
1765 OPC_MXU__POOL14
= 0x1C,
1766 OPC_MXU_Q8ACCE
= 0x1D,
1767 /* not assigned 0x1E */
1768 /* not assigned 0x1F */
1769 /* not assigned 0x20 */
1770 /* not assigned 0x21 */
1771 OPC_MXU_S8LDD
= 0x22,
1772 OPC_MXU_S8STD
= 0x23,
1773 OPC_MXU_S8LDI
= 0x24,
1774 OPC_MXU_S8SDI
= 0x25,
1775 OPC_MXU__POOL15
= 0x26,
1776 OPC_MXU__POOL16
= 0x27,
1777 OPC_MXU__POOL17
= 0x28,
1778 /* not assigned 0x29 */
1779 OPC_MXU_S16LDD
= 0x2A,
1780 OPC_MXU_S16STD
= 0x2B,
1781 OPC_MXU_S16LDI
= 0x2C,
1782 OPC_MXU_S16SDI
= 0x2D,
1783 OPC_MXU_S32M2I
= 0x2E,
1784 OPC_MXU_S32I2M
= 0x2F,
1785 OPC_MXU_D32SLL
= 0x30,
1786 OPC_MXU_D32SLR
= 0x31,
1787 OPC_MXU_D32SARL
= 0x32,
1788 OPC_MXU_D32SAR
= 0x33,
1789 OPC_MXU_Q16SLL
= 0x34,
1790 OPC_MXU_Q16SLR
= 0x35,
1791 OPC_MXU__POOL18
= 0x36,
1792 OPC_MXU_Q16SAR
= 0x37,
1793 OPC_MXU__POOL19
= 0x38,
1794 OPC_MXU__POOL20
= 0x39,
1795 OPC_MXU__POOL21
= 0x3A,
1796 OPC_MXU_Q16SCOP
= 0x3B,
1797 OPC_MXU_Q8MADL
= 0x3C,
1798 OPC_MXU_S32SFL
= 0x3D,
1799 OPC_MXU_Q8SAD
= 0x3E,
1800 /* not assigned 0x3F */
1808 OPC_MXU_S32MAX
= 0x00,
1809 OPC_MXU_S32MIN
= 0x01,
1810 OPC_MXU_D16MAX
= 0x02,
1811 OPC_MXU_D16MIN
= 0x03,
1812 OPC_MXU_Q8MAX
= 0x04,
1813 OPC_MXU_Q8MIN
= 0x05,
1814 OPC_MXU_Q8SLT
= 0x06,
1815 OPC_MXU_Q8SLTU
= 0x07,
1822 OPC_MXU_S32SLT
= 0x00,
1823 OPC_MXU_D16SLT
= 0x01,
1824 OPC_MXU_D16AVG
= 0x02,
1825 OPC_MXU_D16AVGR
= 0x03,
1826 OPC_MXU_Q8AVG
= 0x04,
1827 OPC_MXU_Q8AVGR
= 0x05,
1828 OPC_MXU_Q8ADD
= 0x07,
1835 OPC_MXU_S32CPS
= 0x00,
1836 OPC_MXU_D16CPS
= 0x02,
1837 OPC_MXU_Q8ABD
= 0x04,
1838 OPC_MXU_Q16SAT
= 0x06,
1845 OPC_MXU_D16MULF
= 0x00,
1846 OPC_MXU_D16MULE
= 0x01,
1853 OPC_MXU_S32LDD
= 0x00,
1854 OPC_MXU_S32LDDR
= 0x01,
1861 OPC_MXU_S32STD
= 0x00,
1862 OPC_MXU_S32STDR
= 0x01,
1869 OPC_MXU_S32LDDV
= 0x00,
1870 OPC_MXU_S32LDDVR
= 0x01,
1877 OPC_MXU_S32STDV
= 0x00,
1878 OPC_MXU_S32STDVR
= 0x01,
1885 OPC_MXU_S32LDI
= 0x00,
1886 OPC_MXU_S32LDIR
= 0x01,
1893 OPC_MXU_S32SDI
= 0x00,
1894 OPC_MXU_S32SDIR
= 0x01,
1901 OPC_MXU_S32LDIV
= 0x00,
1902 OPC_MXU_S32LDIVR
= 0x01,
1909 OPC_MXU_S32SDIV
= 0x00,
1910 OPC_MXU_S32SDIVR
= 0x01,
1917 OPC_MXU_D32ACC
= 0x00,
1918 OPC_MXU_D32ACCM
= 0x01,
1919 OPC_MXU_D32ASUM
= 0x02,
1926 OPC_MXU_Q16ACC
= 0x00,
1927 OPC_MXU_Q16ACCM
= 0x01,
1928 OPC_MXU_Q16ASUM
= 0x02,
1935 OPC_MXU_Q8ADDE
= 0x00,
1936 OPC_MXU_D8SUM
= 0x01,
1937 OPC_MXU_D8SUMC
= 0x02,
1944 OPC_MXU_S32MUL
= 0x00,
1945 OPC_MXU_S32MULU
= 0x01,
1946 OPC_MXU_S32EXTR
= 0x02,
1947 OPC_MXU_S32EXTRV
= 0x03,
1954 OPC_MXU_D32SARW
= 0x00,
1955 OPC_MXU_S32ALN
= 0x01,
1956 OPC_MXU_S32ALNI
= 0x02,
1957 OPC_MXU_S32LUI
= 0x03,
1958 OPC_MXU_S32NOR
= 0x04,
1959 OPC_MXU_S32AND
= 0x05,
1960 OPC_MXU_S32OR
= 0x06,
1961 OPC_MXU_S32XOR
= 0x07,
1971 OPC_MXU_LXBU
= 0x04,
1972 OPC_MXU_LXHU
= 0x05,
1979 OPC_MXU_D32SLLV
= 0x00,
1980 OPC_MXU_D32SLRV
= 0x01,
1981 OPC_MXU_D32SARV
= 0x03,
1982 OPC_MXU_Q16SLLV
= 0x04,
1983 OPC_MXU_Q16SLRV
= 0x05,
1984 OPC_MXU_Q16SARV
= 0x07,
1991 OPC_MXU_Q8MUL
= 0x00,
1992 OPC_MXU_Q8MULSU
= 0x01,
1999 OPC_MXU_Q8MOVZ
= 0x00,
2000 OPC_MXU_Q8MOVN
= 0x01,
2001 OPC_MXU_D16MOVZ
= 0x02,
2002 OPC_MXU_D16MOVN
= 0x03,
2003 OPC_MXU_S32MOVZ
= 0x04,
2004 OPC_MXU_S32MOVN
= 0x05,
2011 OPC_MXU_Q8MAC
= 0x00,
2012 OPC_MXU_Q8MACSU
= 0x01,
2016 * Overview of the TX79-specific instruction set
2017 * =============================================
2019 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
2020 * are only used by the specific quadword (128-bit) LQ/SQ load/store
2021 * instructions and certain multimedia instructions (MMIs). These MMIs
2022 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
2023 * or sixteen 8-bit paths.
2027 * The Toshiba TX System RISC TX79 Core Architecture manual,
2028 * https://wiki.qemu.org/File:C790.pdf
2030 * Three-Operand Multiply and Multiply-Add (4 instructions)
2031 * --------------------------------------------------------
2032 * MADD [rd,] rs, rt Multiply/Add
2033 * MADDU [rd,] rs, rt Multiply/Add Unsigned
2034 * MULT [rd,] rs, rt Multiply (3-operand)
2035 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
2037 * Multiply Instructions for Pipeline 1 (10 instructions)
2038 * ------------------------------------------------------
2039 * MULT1 [rd,] rs, rt Multiply Pipeline 1
2040 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
2041 * DIV1 rs, rt Divide Pipeline 1
2042 * DIVU1 rs, rt Divide Unsigned Pipeline 1
2043 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
2044 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
2045 * MFHI1 rd Move From HI1 Register
2046 * MFLO1 rd Move From LO1 Register
2047 * MTHI1 rs Move To HI1 Register
2048 * MTLO1 rs Move To LO1 Register
2050 * Arithmetic (19 instructions)
2051 * ----------------------------
2052 * PADDB rd, rs, rt Parallel Add Byte
2053 * PSUBB rd, rs, rt Parallel Subtract Byte
2054 * PADDH rd, rs, rt Parallel Add Halfword
2055 * PSUBH rd, rs, rt Parallel Subtract Halfword
2056 * PADDW rd, rs, rt Parallel Add Word
2057 * PSUBW rd, rs, rt Parallel Subtract Word
2058 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
2059 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
2060 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
2061 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
2062 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
2063 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
2064 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
2065 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
2066 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
2067 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
2068 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
2069 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
2070 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
2072 * Min/Max (4 instructions)
2073 * ------------------------
2074 * PMAXH rd, rs, rt Parallel Maximum Halfword
2075 * PMINH rd, rs, rt Parallel Minimum Halfword
2076 * PMAXW rd, rs, rt Parallel Maximum Word
2077 * PMINW rd, rs, rt Parallel Minimum Word
2079 * Absolute (2 instructions)
2080 * -------------------------
2081 * PABSH rd, rt Parallel Absolute Halfword
2082 * PABSW rd, rt Parallel Absolute Word
2084 * Logical (4 instructions)
2085 * ------------------------
2086 * PAND rd, rs, rt Parallel AND
2087 * POR rd, rs, rt Parallel OR
2088 * PXOR rd, rs, rt Parallel XOR
2089 * PNOR rd, rs, rt Parallel NOR
2091 * Shift (9 instructions)
2092 * ----------------------
2093 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2094 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2095 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2096 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2097 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2098 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2099 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2100 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2101 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2103 * Compare (6 instructions)
2104 * ------------------------
2105 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2106 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2107 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2108 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2109 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2110 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2112 * LZC (1 instruction)
2113 * -------------------
2114 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2116 * Quadword Load and Store (2 instructions)
2117 * ----------------------------------------
2118 * LQ rt, offset(base) Load Quadword
2119 * SQ rt, offset(base) Store Quadword
2121 * Multiply and Divide (19 instructions)
2122 * -------------------------------------
2123 * PMULTW rd, rs, rt Parallel Multiply Word
2124 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2125 * PDIVW rs, rt Parallel Divide Word
2126 * PDIVUW rs, rt Parallel Divide Unsigned Word
2127 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2128 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2129 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2130 * PMULTH rd, rs, rt Parallel Multiply Halfword
2131 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2132 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2133 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2134 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2135 * PDIVBW rs, rt Parallel Divide Broadcast Word
2136 * PMFHI rd Parallel Move From HI Register
2137 * PMFLO rd Parallel Move From LO Register
2138 * PMTHI rs Parallel Move To HI Register
2139 * PMTLO rs Parallel Move To LO Register
2140 * PMFHL rd Parallel Move From HI/LO Register
2141 * PMTHL rs Parallel Move To HI/LO Register
2143 * Pack/Extend (11 instructions)
2144 * -----------------------------
2145 * PPAC5 rd, rt Parallel Pack to 5 bits
2146 * PPACB rd, rs, rt Parallel Pack to Byte
2147 * PPACH rd, rs, rt Parallel Pack to Halfword
2148 * PPACW rd, rs, rt Parallel Pack to Word
2149 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2150 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2151 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2152 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2153 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2154 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2155 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2157 * Others (16 instructions)
2158 * ------------------------
2159 * PCPYH rd, rt Parallel Copy Halfword
2160 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2161 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2162 * PREVH rd, rt Parallel Reverse Halfword
2163 * PINTH rd, rs, rt Parallel Interleave Halfword
2164 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2165 * PEXEH rd, rt Parallel Exchange Even Halfword
2166 * PEXCH rd, rt Parallel Exchange Center Halfword
2167 * PEXEW rd, rt Parallel Exchange Even Word
2168 * PEXCW rd, rt Parallel Exchange Center Word
2169 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2170 * MFSA rd Move from Shift Amount Register
2171 * MTSA rs Move to Shift Amount Register
2172 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2173 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2174 * PROT3W rd, rt Parallel Rotate 3 Words
2176 * MMI (MultiMedia Instruction) encodings
2177 * ======================================
2179 * MMI instructions encoding table keys:
2181 * * This code is reserved for future use. An attempt to execute it
2182 * causes a Reserved Instruction exception.
2183 * % This code indicates an instruction class. The instruction word
2184 * must be further decoded by examining additional tables that show
2185 * the values for other instruction fields.
2186 * # This code is reserved for the unsupported instructions DMULT,
2187 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2188 * to execute it causes a Reserved Instruction exception.
2190 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
2193 * +--------+----------------------------------------+
2195 * +--------+----------------------------------------+
2197 * opcode bits 28..26
2198 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2199 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2200 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2201 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2202 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2203 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2204 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2205 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2206 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2207 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2208 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
2212 MMI_OPC_CLASS_MMI
= 0x1C << 26, /* Same as OPC_SPECIAL2 */
2213 MMI_OPC_LQ
= 0x1E << 26, /* Same as OPC_MSA */
2214 MMI_OPC_SQ
= 0x1F << 26, /* Same as OPC_SPECIAL3 */
2218 * MMI instructions with opcode field = MMI:
2221 * +--------+-------------------------------+--------+
2222 * | MMI | |function|
2223 * +--------+-------------------------------+--------+
2225 * function bits 2..0
2226 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2227 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2228 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2229 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2230 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2231 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2232 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2233 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2234 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2235 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2236 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
2239 #define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2241 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
2242 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
2243 MMI_OPC_PLZCW
= 0x04 | MMI_OPC_CLASS_MMI
,
2244 MMI_OPC_CLASS_MMI0
= 0x08 | MMI_OPC_CLASS_MMI
,
2245 MMI_OPC_CLASS_MMI2
= 0x09 | MMI_OPC_CLASS_MMI
,
2246 MMI_OPC_MFHI1
= 0x10 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFHI */
2247 MMI_OPC_MTHI1
= 0x11 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTHI */
2248 MMI_OPC_MFLO1
= 0x12 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFLO */
2249 MMI_OPC_MTLO1
= 0x13 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTLO */
2250 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
2251 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
2252 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
2253 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
2254 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
2255 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
2256 MMI_OPC_CLASS_MMI1
= 0x28 | MMI_OPC_CLASS_MMI
,
2257 MMI_OPC_CLASS_MMI3
= 0x29 | MMI_OPC_CLASS_MMI
,
2258 MMI_OPC_PMFHL
= 0x30 | MMI_OPC_CLASS_MMI
,
2259 MMI_OPC_PMTHL
= 0x31 | MMI_OPC_CLASS_MMI
,
2260 MMI_OPC_PSLLH
= 0x34 | MMI_OPC_CLASS_MMI
,
2261 MMI_OPC_PSRLH
= 0x36 | MMI_OPC_CLASS_MMI
,
2262 MMI_OPC_PSRAH
= 0x37 | MMI_OPC_CLASS_MMI
,
2263 MMI_OPC_PSLLW
= 0x3C | MMI_OPC_CLASS_MMI
,
2264 MMI_OPC_PSRLW
= 0x3E | MMI_OPC_CLASS_MMI
,
2265 MMI_OPC_PSRAW
= 0x3F | MMI_OPC_CLASS_MMI
,
2269 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
2272 * +--------+----------------------+--------+--------+
2273 * | MMI | |function| MMI0 |
2274 * +--------+----------------------+--------+--------+
2276 * function bits 7..6
2277 * bits | 0 | 1 | 2 | 3
2278 * 10..8 | 00 | 01 | 10 | 11
2279 * -------+-------+-------+-------+-------
2280 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2281 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2282 * 2 010 | PADDB | PSUBB | PCGTB | *
2283 * 3 011 | * | * | * | *
2284 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2285 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2286 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2287 * 7 111 | * | * | PEXT5 | PPAC5
2290 #define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2292 MMI_OPC_0_PADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI0
,
2293 MMI_OPC_0_PSUBW
= (0x01 << 6) | MMI_OPC_CLASS_MMI0
,
2294 MMI_OPC_0_PCGTW
= (0x02 << 6) | MMI_OPC_CLASS_MMI0
,
2295 MMI_OPC_0_PMAXW
= (0x03 << 6) | MMI_OPC_CLASS_MMI0
,
2296 MMI_OPC_0_PADDH
= (0x04 << 6) | MMI_OPC_CLASS_MMI0
,
2297 MMI_OPC_0_PSUBH
= (0x05 << 6) | MMI_OPC_CLASS_MMI0
,
2298 MMI_OPC_0_PCGTH
= (0x06 << 6) | MMI_OPC_CLASS_MMI0
,
2299 MMI_OPC_0_PMAXH
= (0x07 << 6) | MMI_OPC_CLASS_MMI0
,
2300 MMI_OPC_0_PADDB
= (0x08 << 6) | MMI_OPC_CLASS_MMI0
,
2301 MMI_OPC_0_PSUBB
= (0x09 << 6) | MMI_OPC_CLASS_MMI0
,
2302 MMI_OPC_0_PCGTB
= (0x0A << 6) | MMI_OPC_CLASS_MMI0
,
2303 MMI_OPC_0_PADDSW
= (0x10 << 6) | MMI_OPC_CLASS_MMI0
,
2304 MMI_OPC_0_PSUBSW
= (0x11 << 6) | MMI_OPC_CLASS_MMI0
,
2305 MMI_OPC_0_PEXTLW
= (0x12 << 6) | MMI_OPC_CLASS_MMI0
,
2306 MMI_OPC_0_PPACW
= (0x13 << 6) | MMI_OPC_CLASS_MMI0
,
2307 MMI_OPC_0_PADDSH
= (0x14 << 6) | MMI_OPC_CLASS_MMI0
,
2308 MMI_OPC_0_PSUBSH
= (0x15 << 6) | MMI_OPC_CLASS_MMI0
,
2309 MMI_OPC_0_PEXTLH
= (0x16 << 6) | MMI_OPC_CLASS_MMI0
,
2310 MMI_OPC_0_PPACH
= (0x17 << 6) | MMI_OPC_CLASS_MMI0
,
2311 MMI_OPC_0_PADDSB
= (0x18 << 6) | MMI_OPC_CLASS_MMI0
,
2312 MMI_OPC_0_PSUBSB
= (0x19 << 6) | MMI_OPC_CLASS_MMI0
,
2313 MMI_OPC_0_PEXTLB
= (0x1A << 6) | MMI_OPC_CLASS_MMI0
,
2314 MMI_OPC_0_PPACB
= (0x1B << 6) | MMI_OPC_CLASS_MMI0
,
2315 MMI_OPC_0_PEXT5
= (0x1E << 6) | MMI_OPC_CLASS_MMI0
,
2316 MMI_OPC_0_PPAC5
= (0x1F << 6) | MMI_OPC_CLASS_MMI0
,
2320 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
2323 * +--------+----------------------+--------+--------+
2324 * | MMI | |function| MMI1 |
2325 * +--------+----------------------+--------+--------+
2327 * function bits 7..6
2328 * bits | 0 | 1 | 2 | 3
2329 * 10..8 | 00 | 01 | 10 | 11
2330 * -------+-------+-------+-------+-------
2331 * 0 000 | * | PABSW | PCEQW | PMINW
2332 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2333 * 2 010 | * | * | PCEQB | *
2334 * 3 011 | * | * | * | *
2335 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2336 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2337 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2338 * 7 111 | * | * | * | *
2341 #define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2343 MMI_OPC_1_PABSW
= (0x01 << 6) | MMI_OPC_CLASS_MMI1
,
2344 MMI_OPC_1_PCEQW
= (0x02 << 6) | MMI_OPC_CLASS_MMI1
,
2345 MMI_OPC_1_PMINW
= (0x03 << 6) | MMI_OPC_CLASS_MMI1
,
2346 MMI_OPC_1_PADSBH
= (0x04 << 6) | MMI_OPC_CLASS_MMI1
,
2347 MMI_OPC_1_PABSH
= (0x05 << 6) | MMI_OPC_CLASS_MMI1
,
2348 MMI_OPC_1_PCEQH
= (0x06 << 6) | MMI_OPC_CLASS_MMI1
,
2349 MMI_OPC_1_PMINH
= (0x07 << 6) | MMI_OPC_CLASS_MMI1
,
2350 MMI_OPC_1_PCEQB
= (0x0A << 6) | MMI_OPC_CLASS_MMI1
,
2351 MMI_OPC_1_PADDUW
= (0x10 << 6) | MMI_OPC_CLASS_MMI1
,
2352 MMI_OPC_1_PSUBUW
= (0x11 << 6) | MMI_OPC_CLASS_MMI1
,
2353 MMI_OPC_1_PEXTUW
= (0x12 << 6) | MMI_OPC_CLASS_MMI1
,
2354 MMI_OPC_1_PADDUH
= (0x14 << 6) | MMI_OPC_CLASS_MMI1
,
2355 MMI_OPC_1_PSUBUH
= (0x15 << 6) | MMI_OPC_CLASS_MMI1
,
2356 MMI_OPC_1_PEXTUH
= (0x16 << 6) | MMI_OPC_CLASS_MMI1
,
2357 MMI_OPC_1_PADDUB
= (0x18 << 6) | MMI_OPC_CLASS_MMI1
,
2358 MMI_OPC_1_PSUBUB
= (0x19 << 6) | MMI_OPC_CLASS_MMI1
,
2359 MMI_OPC_1_PEXTUB
= (0x1A << 6) | MMI_OPC_CLASS_MMI1
,
2360 MMI_OPC_1_QFSRV
= (0x1B << 6) | MMI_OPC_CLASS_MMI1
,
2364 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
2367 * +--------+----------------------+--------+--------+
2368 * | MMI | |function| MMI2 |
2369 * +--------+----------------------+--------+--------+
2371 * function bits 7..6
2372 * bits | 0 | 1 | 2 | 3
2373 * 10..8 | 00 | 01 | 10 | 11
2374 * -------+-------+-------+-------+-------
2375 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2376 * 1 001 | PMSUBW| * | * | *
2377 * 2 010 | PMFHI | PMFLO | PINTH | *
2378 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2379 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2380 * 5 101 | PMSUBH| PHMSBH| * | *
2381 * 6 110 | * | * | PEXEH | PREVH
2382 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2385 #define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2387 MMI_OPC_2_PMADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI2
,
2388 MMI_OPC_2_PSLLVW
= (0x02 << 6) | MMI_OPC_CLASS_MMI2
,
2389 MMI_OPC_2_PSRLVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI2
,
2390 MMI_OPC_2_PMSUBW
= (0x04 << 6) | MMI_OPC_CLASS_MMI2
,
2391 MMI_OPC_2_PMFHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI2
,
2392 MMI_OPC_2_PMFLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI2
,
2393 MMI_OPC_2_PINTH
= (0x0A << 6) | MMI_OPC_CLASS_MMI2
,
2394 MMI_OPC_2_PMULTW
= (0x0C << 6) | MMI_OPC_CLASS_MMI2
,
2395 MMI_OPC_2_PDIVW
= (0x0D << 6) | MMI_OPC_CLASS_MMI2
,
2396 MMI_OPC_2_PCPYLD
= (0x0E << 6) | MMI_OPC_CLASS_MMI2
,
2397 MMI_OPC_2_PMADDH
= (0x10 << 6) | MMI_OPC_CLASS_MMI2
,
2398 MMI_OPC_2_PHMADH
= (0x11 << 6) | MMI_OPC_CLASS_MMI2
,
2399 MMI_OPC_2_PAND
= (0x12 << 6) | MMI_OPC_CLASS_MMI2
,
2400 MMI_OPC_2_PXOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI2
,
2401 MMI_OPC_2_PMSUBH
= (0x14 << 6) | MMI_OPC_CLASS_MMI2
,
2402 MMI_OPC_2_PHMSBH
= (0x15 << 6) | MMI_OPC_CLASS_MMI2
,
2403 MMI_OPC_2_PEXEH
= (0x1A << 6) | MMI_OPC_CLASS_MMI2
,
2404 MMI_OPC_2_PREVH
= (0x1B << 6) | MMI_OPC_CLASS_MMI2
,
2405 MMI_OPC_2_PMULTH
= (0x1C << 6) | MMI_OPC_CLASS_MMI2
,
2406 MMI_OPC_2_PDIVBW
= (0x1D << 6) | MMI_OPC_CLASS_MMI2
,
2407 MMI_OPC_2_PEXEW
= (0x1E << 6) | MMI_OPC_CLASS_MMI2
,
2408 MMI_OPC_2_PROT3W
= (0x1F << 6) | MMI_OPC_CLASS_MMI2
,
2412 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
2415 * +--------+----------------------+--------+--------+
2416 * | MMI | |function| MMI3 |
2417 * +--------+----------------------+--------+--------+
2419 * function bits 7..6
2420 * bits | 0 | 1 | 2 | 3
2421 * 10..8 | 00 | 01 | 10 | 11
2422 * -------+-------+-------+-------+-------
2423 * 0 000 |PMADDUW| * | * | PSRAVW
2424 * 1 001 | * | * | * | *
2425 * 2 010 | PMTHI | PMTLO | PINTEH| *
2426 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2427 * 4 100 | * | * | POR | PNOR
2428 * 5 101 | * | * | * | *
2429 * 6 110 | * | * | PEXCH | PCPYH
2430 * 7 111 | * | * | PEXCW | *
2433 #define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2435 MMI_OPC_3_PMADDUW
= (0x00 << 6) | MMI_OPC_CLASS_MMI3
,
2436 MMI_OPC_3_PSRAVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI3
,
2437 MMI_OPC_3_PMTHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI3
,
2438 MMI_OPC_3_PMTLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI3
,
2439 MMI_OPC_3_PINTEH
= (0x0A << 6) | MMI_OPC_CLASS_MMI3
,
2440 MMI_OPC_3_PMULTUW
= (0x0C << 6) | MMI_OPC_CLASS_MMI3
,
2441 MMI_OPC_3_PDIVUW
= (0x0D << 6) | MMI_OPC_CLASS_MMI3
,
2442 MMI_OPC_3_PCPYUD
= (0x0E << 6) | MMI_OPC_CLASS_MMI3
,
2443 MMI_OPC_3_POR
= (0x12 << 6) | MMI_OPC_CLASS_MMI3
,
2444 MMI_OPC_3_PNOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI3
,
2445 MMI_OPC_3_PEXCH
= (0x1A << 6) | MMI_OPC_CLASS_MMI3
,
2446 MMI_OPC_3_PCPYH
= (0x1B << 6) | MMI_OPC_CLASS_MMI3
,
2447 MMI_OPC_3_PEXCW
= (0x1E << 6) | MMI_OPC_CLASS_MMI3
,
2450 /* global register indices */
2451 static TCGv cpu_gpr
[32], cpu_PC
;
2452 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2453 static TCGv cpu_dspctrl
, btarget
, bcond
;
2454 static TCGv cpu_lladdr
, cpu_llval
;
2455 static TCGv_i32 hflags
;
2456 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2457 static TCGv_i64 fpu_f64
[32];
2458 static TCGv_i64 msa_wr_d
[64];
2460 #if defined(TARGET_MIPS64)
2461 /* Upper halves of R5900's 128-bit registers: MMRs (multimedia registers) */
2462 static TCGv_i64 cpu_mmr
[32];
2465 #if !defined(TARGET_MIPS64)
2467 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2471 #include "exec/gen-icount.h"
2473 #define gen_helper_0e0i(name, arg) do { \
2474 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
2475 gen_helper_##name(cpu_env, helper_tmp); \
2476 tcg_temp_free_i32(helper_tmp); \
2479 #define gen_helper_0e1i(name, arg1, arg2) do { \
2480 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
2481 gen_helper_##name(cpu_env, arg1, helper_tmp); \
2482 tcg_temp_free_i32(helper_tmp); \
2485 #define gen_helper_1e0i(name, ret, arg1) do { \
2486 TCGv_i32 helper_tmp = tcg_const_i32(arg1); \
2487 gen_helper_##name(ret, cpu_env, helper_tmp); \
2488 tcg_temp_free_i32(helper_tmp); \
2491 #define gen_helper_1e1i(name, ret, arg1, arg2) do { \
2492 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
2493 gen_helper_##name(ret, cpu_env, arg1, helper_tmp); \
2494 tcg_temp_free_i32(helper_tmp); \
2497 #define gen_helper_0e2i(name, arg1, arg2, arg3) do { \
2498 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
2499 gen_helper_##name(cpu_env, arg1, arg2, helper_tmp); \
2500 tcg_temp_free_i32(helper_tmp); \
2503 #define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do { \
2504 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
2505 gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp); \
2506 tcg_temp_free_i32(helper_tmp); \
2509 #define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do { \
2510 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
2511 gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp); \
2512 tcg_temp_free_i32(helper_tmp); \
2515 typedef struct DisasContext
{
2516 DisasContextBase base
;
2517 target_ulong saved_pc
;
2518 target_ulong page_start
;
2520 uint64_t insn_flags
;
2521 int32_t CP0_Config1
;
2522 int32_t CP0_Config2
;
2523 int32_t CP0_Config3
;
2524 int32_t CP0_Config5
;
2525 /* Routine used to access memory */
2527 TCGMemOp default_tcg_memop_mask
;
2528 uint32_t hflags
, saved_hflags
;
2529 target_ulong btarget
;
2540 int CP0_LLAddr_shift
;
2550 #define DISAS_STOP DISAS_TARGET_0
2551 #define DISAS_EXIT DISAS_TARGET_1
2553 static const char * const regnames
[] = {
2554 "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
2555 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
2556 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
2557 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
2560 static const char * const regnames_HI
[] = {
2561 "HI0", "HI1", "HI2", "HI3",
2564 static const char * const regnames_LO
[] = {
2565 "LO0", "LO1", "LO2", "LO3",
2568 static const char * const fregnames
[] = {
2569 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
2570 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
2571 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
2572 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
2575 static const char * const msaregnames
[] = {
2576 "w0.d0", "w0.d1", "w1.d0", "w1.d1",
2577 "w2.d0", "w2.d1", "w3.d0", "w3.d1",
2578 "w4.d0", "w4.d1", "w5.d0", "w5.d1",
2579 "w6.d0", "w6.d1", "w7.d0", "w7.d1",
2580 "w8.d0", "w8.d1", "w9.d0", "w9.d1",
2581 "w10.d0", "w10.d1", "w11.d0", "w11.d1",
2582 "w12.d0", "w12.d1", "w13.d0", "w13.d1",
2583 "w14.d0", "w14.d1", "w15.d0", "w15.d1",
2584 "w16.d0", "w16.d1", "w17.d0", "w17.d1",
2585 "w18.d0", "w18.d1", "w19.d0", "w19.d1",
2586 "w20.d0", "w20.d1", "w21.d0", "w21.d1",
2587 "w22.d0", "w22.d1", "w23.d0", "w23.d1",
2588 "w24.d0", "w24.d1", "w25.d0", "w25.d1",
2589 "w26.d0", "w26.d1", "w27.d0", "w27.d1",
2590 "w28.d0", "w28.d1", "w29.d0", "w29.d1",
2591 "w30.d0", "w30.d1", "w31.d0", "w31.d1",
2594 #if !defined(TARGET_MIPS64)
2595 static const char * const mxuregnames
[] = {
2596 "XR1", "XR2", "XR3", "XR4", "XR5", "XR6", "XR7", "XR8",
2597 "XR9", "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
2601 #define LOG_DISAS(...) \
2603 if (MIPS_DEBUG_DISAS) { \
2604 qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__); \
2608 #define MIPS_INVAL(op) \
2610 if (MIPS_DEBUG_DISAS) { \
2611 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
2612 TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
2613 ctx->base.pc_next, ctx->opcode, op, \
2614 ctx->opcode >> 26, ctx->opcode & 0x3F, \
2615 ((ctx->opcode >> 16) & 0x1F)); \
2619 /* General purpose registers moves. */
2620 static inline void gen_load_gpr(TCGv t
, int reg
)
2623 tcg_gen_movi_tl(t
, 0);
2625 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2629 static inline void gen_store_gpr(TCGv t
, int reg
)
2632 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2636 /* Moves to/from shadow registers. */
2637 static inline void gen_load_srsgpr(int from
, int to
)
2639 TCGv t0
= tcg_temp_new();
2642 tcg_gen_movi_tl(t0
, 0);
2644 TCGv_i32 t2
= tcg_temp_new_i32();
2645 TCGv_ptr addr
= tcg_temp_new_ptr();
2647 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2648 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2649 tcg_gen_andi_i32(t2
, t2
, 0xf);
2650 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2651 tcg_gen_ext_i32_ptr(addr
, t2
);
2652 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2654 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2655 tcg_temp_free_ptr(addr
);
2656 tcg_temp_free_i32(t2
);
2658 gen_store_gpr(t0
, to
);
2662 static inline void gen_store_srsgpr (int from
, int to
)
2665 TCGv t0
= tcg_temp_new();
2666 TCGv_i32 t2
= tcg_temp_new_i32();
2667 TCGv_ptr addr
= tcg_temp_new_ptr();
2669 gen_load_gpr(t0
, from
);
2670 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2671 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2672 tcg_gen_andi_i32(t2
, t2
, 0xf);
2673 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2674 tcg_gen_ext_i32_ptr(addr
, t2
);
2675 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2677 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2678 tcg_temp_free_ptr(addr
);
2679 tcg_temp_free_i32(t2
);
2684 #if !defined(TARGET_MIPS64)
2685 /* MXU General purpose registers moves. */
2686 static inline void gen_load_mxu_gpr(TCGv t
, unsigned int reg
)
2689 tcg_gen_movi_tl(t
, 0);
2690 } else if (reg
<= 15) {
2691 tcg_gen_mov_tl(t
, mxu_gpr
[reg
- 1]);
2695 static inline void gen_store_mxu_gpr(TCGv t
, unsigned int reg
)
2697 if (reg
> 0 && reg
<= 15) {
2698 tcg_gen_mov_tl(mxu_gpr
[reg
- 1], t
);
2702 /* MXU control register moves. */
2703 static inline void gen_load_mxu_cr(TCGv t
)
2705 tcg_gen_mov_tl(t
, mxu_CR
);
2708 static inline void gen_store_mxu_cr(TCGv t
)
2710 /* TODO: Add handling of RW rules for MXU_CR. */
2711 tcg_gen_mov_tl(mxu_CR
, t
);
2717 static inline void gen_save_pc(target_ulong pc
)
2719 tcg_gen_movi_tl(cpu_PC
, pc
);
2722 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2724 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2725 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2726 gen_save_pc(ctx
->base
.pc_next
);
2727 ctx
->saved_pc
= ctx
->base
.pc_next
;
2729 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2730 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2731 ctx
->saved_hflags
= ctx
->hflags
;
2732 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2738 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2744 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2746 ctx
->saved_hflags
= ctx
->hflags
;
2747 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2753 ctx
->btarget
= env
->btarget
;
2758 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2760 TCGv_i32 texcp
= tcg_const_i32(excp
);
2761 TCGv_i32 terr
= tcg_const_i32(err
);
2762 save_cpu_state(ctx
, 1);
2763 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2764 tcg_temp_free_i32(terr
);
2765 tcg_temp_free_i32(texcp
);
2766 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2769 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2771 gen_helper_0e0i(raise_exception
, excp
);
2774 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2776 generate_exception_err(ctx
, excp
, 0);
2779 /* Floating point register moves. */
2780 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2782 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2783 generate_exception(ctx
, EXCP_RI
);
2785 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2788 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2791 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2792 generate_exception(ctx
, EXCP_RI
);
2794 t64
= tcg_temp_new_i64();
2795 tcg_gen_extu_i32_i64(t64
, t
);
2796 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2797 tcg_temp_free_i64(t64
);
2800 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2802 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2803 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2805 gen_load_fpr32(ctx
, t
, reg
| 1);
2809 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2811 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2812 TCGv_i64 t64
= tcg_temp_new_i64();
2813 tcg_gen_extu_i32_i64(t64
, t
);
2814 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2815 tcg_temp_free_i64(t64
);
2817 gen_store_fpr32(ctx
, t
, reg
| 1);
2821 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2823 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2824 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2826 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2830 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2832 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2833 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2836 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2837 t0
= tcg_temp_new_i64();
2838 tcg_gen_shri_i64(t0
, t
, 32);
2839 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2840 tcg_temp_free_i64(t0
);
/*
 * Map an FP condition-code number to its bit position in FCSR:
 * cc 0 is bit 23; cc 1..7 occupy bits 25..31.
 */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
2853 /* Addresses computation */
2854 static inline void gen_op_addr_add(DisasContext
*ctx
, TCGv ret
, TCGv arg0
,
2857 tcg_gen_add_tl(ret
, arg0
, arg1
);
2859 #if defined(TARGET_MIPS64)
2860 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2861 tcg_gen_ext32s_i64(ret
, ret
);
2866 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2869 tcg_gen_addi_tl(ret
, base
, ofs
);
2871 #if defined(TARGET_MIPS64)
2872 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2873 tcg_gen_ext32s_i64(ret
, ret
);
2878 /* Addresses computation (translation time) */
2879 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2882 target_long sum
= base
+ offset
;
2884 #if defined(TARGET_MIPS64)
2885 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2892 /* Sign-extract the low 32-bits to a target_long. */
2893 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2895 #if defined(TARGET_MIPS64)
2896 tcg_gen_ext32s_i64(ret
, arg
);
2898 tcg_gen_extrl_i64_i32(ret
, arg
);
2902 /* Sign-extract the high 32-bits to a target_long. */
2903 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2905 #if defined(TARGET_MIPS64)
2906 tcg_gen_sari_i64(ret
, arg
, 32);
2908 tcg_gen_extrh_i64_i32(ret
, arg
);
2912 static inline void check_cp0_enabled(DisasContext
*ctx
)
2914 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2915 generate_exception_err(ctx
, EXCP_CpU
, 0);
2919 static inline void check_cp1_enabled(DisasContext
*ctx
)
2921 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
))) {
2922 generate_exception_err(ctx
, EXCP_CpU
, 1);
2926 /* Verify that the processor is running with COP1X instructions enabled.
2927 This is associated with the nabla symbol in the MIPS32 and MIPS64
2930 static inline void check_cop1x(DisasContext
*ctx
)
2932 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
))) {
2933 generate_exception_end(ctx
, EXCP_RI
);
2937 /* Verify that the processor is running with 64-bit floating-point
2938 operations enabled. */
2940 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2942 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
))) {
2943 generate_exception_end(ctx
, EXCP_RI
);
2948 * Verify if floating point register is valid; an operation is not defined
2949 * if bit 0 of any register specification is set and the FR bit in the
2950 * Status register equals zero, since the register numbers specify an
2951 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2952 * in the Status register equals one, both even and odd register numbers
2953 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2955 * Multiple 64 bit wide registers can be checked by calling
2956 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2958 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2960 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1))) {
2961 generate_exception_end(ctx
, EXCP_RI
);
2965 /* Verify that the processor is running with DSP instructions enabled.
2966 This is enabled by CP0 Status register MX(24) bit.
2969 static inline void check_dsp(DisasContext
*ctx
)
2971 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2972 if (ctx
->insn_flags
& ASE_DSP
) {
2973 generate_exception_end(ctx
, EXCP_DSPDIS
);
2975 generate_exception_end(ctx
, EXCP_RI
);
2980 static inline void check_dsp_r2(DisasContext
*ctx
)
2982 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2983 if (ctx
->insn_flags
& ASE_DSP
) {
2984 generate_exception_end(ctx
, EXCP_DSPDIS
);
2986 generate_exception_end(ctx
, EXCP_RI
);
2991 static inline void check_dsp_r3(DisasContext
*ctx
)
2993 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2994 if (ctx
->insn_flags
& ASE_DSP
) {
2995 generate_exception_end(ctx
, EXCP_DSPDIS
);
2997 generate_exception_end(ctx
, EXCP_RI
);
3002 /* This code generates a "reserved instruction" exception if the
3003 CPU does not support the instruction set corresponding to flags. */
3004 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
3006 if (unlikely(!(ctx
->insn_flags
& flags
))) {
3007 generate_exception_end(ctx
, EXCP_RI
);
3011 /* This code generates a "reserved instruction" exception if the
3012 CPU has corresponding flag set which indicates that the instruction
3013 has been removed. */
3014 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
3016 if (unlikely(ctx
->insn_flags
& flags
)) {
3017 generate_exception_end(ctx
, EXCP_RI
);
3022 * The Linux kernel traps certain reserved instruction exceptions to
3023 * emulate the corresponding instructions. QEMU is the kernel in user
3024 * mode, so those traps are emulated by accepting the instructions.
3026 * A reserved instruction exception is generated for flagged CPUs if
3027 * QEMU runs in system mode.
3029 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
3031 #ifndef CONFIG_USER_ONLY
3032 check_insn_opc_removed(ctx
, flags
);
3036 /* This code generates a "reserved instruction" exception if the
3037 CPU does not support 64-bit paired-single (PS) floating point data type */
3038 static inline void check_ps(DisasContext
*ctx
)
3040 if (unlikely(!ctx
->ps
)) {
3041 generate_exception(ctx
, EXCP_RI
);
3043 check_cp1_64bitmode(ctx
);
3046 #ifdef TARGET_MIPS64
3047 /* This code generates a "reserved instruction" exception if 64-bit
3048 instructions are not enabled. */
3049 static inline void check_mips_64(DisasContext
*ctx
)
3051 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
))) {
3052 generate_exception_end(ctx
, EXCP_RI
);
3057 #ifndef CONFIG_USER_ONLY
3058 static inline void check_mvh(DisasContext
*ctx
)
3060 if (unlikely(!ctx
->mvh
)) {
3061 generate_exception(ctx
, EXCP_RI
);
3067 * This code generates a "reserved instruction" exception if the
3068 * Config5 XNP bit is set.
3070 static inline void check_xnp(DisasContext
*ctx
)
3072 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
3073 generate_exception_end(ctx
, EXCP_RI
);
3077 #ifndef CONFIG_USER_ONLY
3079 * This code generates a "reserved instruction" exception if the
3080 * Config3 PW bit is NOT set.
3082 static inline void check_pw(DisasContext
*ctx
)
3084 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
3085 generate_exception_end(ctx
, EXCP_RI
);
3091 * This code generates a "reserved instruction" exception if the
3092 * Config3 MT bit is NOT set.
3094 static inline void check_mt(DisasContext
*ctx
)
3096 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3097 generate_exception_end(ctx
, EXCP_RI
);
3101 #ifndef CONFIG_USER_ONLY
3103 * This code generates a "coprocessor unusable" exception if CP0 is not
3104 * available, and, if that is not the case, generates a "reserved instruction"
3105 * exception if the Config5 MT bit is NOT set. This is needed for availability
3106 * control of some of MT ASE instructions.
3108 static inline void check_cp0_mt(DisasContext
*ctx
)
3110 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
3111 generate_exception_err(ctx
, EXCP_CpU
, 0);
3113 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3114 generate_exception_err(ctx
, EXCP_RI
, 0);
3121 * This code generates a "reserved instruction" exception if the
3122 * Config5 NMS bit is set.
3124 static inline void check_nms(DisasContext
*ctx
)
3126 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
3127 generate_exception_end(ctx
, EXCP_RI
);
3132 * This code generates a "reserved instruction" exception if the
3133 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
3134 * Config2 TL, and Config5 L2C are unset.
3136 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
3138 if (unlikely((ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
3139 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
3140 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
3141 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
3142 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
3143 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))) {
3144 generate_exception_end(ctx
, EXCP_RI
);
3149 * This code generates a "reserved instruction" exception if the
3150 * Config5 EVA bit is NOT set.
3152 static inline void check_eva(DisasContext
*ctx
)
3154 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3155 generate_exception_end(ctx
, EXCP_RI
);
3160 /* Define small wrappers for gen_load_fpr* so that we have a uniform
3161 calling interface for 32 and 64-bit FPRs. No sense in changing
3162 all callers for gen_load_fpr32 when we need the CTX parameter for
3164 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3165 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3166 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3167 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3168 int ft, int fs, int cc) \
3170 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3171 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3180 check_cp1_registers(ctx, fs | ft); \
3188 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3189 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3192 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); \
3195 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); \
3198 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); \
3201 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); \
3204 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); \
3207 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); \
3210 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); \
3213 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); \
3216 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); \
3219 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); \
3222 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); \
3225 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); \
3228 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); \
3231 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); \
3234 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); \
3237 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); \
3242 tcg_temp_free_i##bits (fp0); \
3243 tcg_temp_free_i##bits (fp1); \
3246 FOP_CONDS(, 0, d
, FMT_D
, 64)
3247 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3248 FOP_CONDS(, 0, s
, FMT_S
, 32)
3249 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3250 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3251 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3254 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3255 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3256 int ft, int fs, int fd) \
3258 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3259 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3260 if (ifmt == FMT_D) { \
3261 check_cp1_registers(ctx, fs | ft | fd); \
3263 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3264 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3267 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3270 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3273 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3276 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3279 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3282 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3285 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3288 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3291 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3294 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3297 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3300 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3303 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3306 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3309 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3312 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3315 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3318 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3321 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3324 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3327 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3330 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3336 tcg_temp_free_i ## bits (fp0); \
3337 tcg_temp_free_i ## bits (fp1); \
3340 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3341 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3343 #undef gen_ldcmp_fpr32
3344 #undef gen_ldcmp_fpr64
3346 /* load/store instructions. */
3347 #ifdef CONFIG_USER_ONLY
3348 #define OP_LD_ATOMIC(insn,fname) \
3349 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3350 DisasContext *ctx) \
3352 TCGv t0 = tcg_temp_new(); \
3353 tcg_gen_mov_tl(t0, arg1); \
3354 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3355 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3356 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3357 tcg_temp_free(t0); \
3360 #define OP_LD_ATOMIC(insn,fname) \
3361 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3362 DisasContext *ctx) \
3364 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3367 OP_LD_ATOMIC(ll
,ld32s
);
3368 #if defined(TARGET_MIPS64)
3369 OP_LD_ATOMIC(lld
,ld64
);
3373 static void gen_base_offset_addr(DisasContext
*ctx
, TCGv addr
,
3374 int base
, int offset
)
3377 tcg_gen_movi_tl(addr
, offset
);
3378 } else if (offset
== 0) {
3379 gen_load_gpr(addr
, base
);
3381 tcg_gen_movi_tl(addr
, offset
);
3382 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3386 static target_ulong
pc_relative_pc(DisasContext
*ctx
)
3388 target_ulong pc
= ctx
->base
.pc_next
;
3390 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3391 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3396 pc
&= ~(target_ulong
)3;
3401 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3402 int rt
, int base
, int offset
)
3405 int mem_idx
= ctx
->mem_idx
;
3407 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3408 /* Loongson CPU uses a load to zero register for prefetch.
3409 We emulate it as a NOP. On other CPU we must perform the
3410 actual memory access. */
3414 t0
= tcg_temp_new();
3415 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3418 #if defined(TARGET_MIPS64)
3420 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3421 ctx
->default_tcg_memop_mask
);
3422 gen_store_gpr(t0
, rt
);
3425 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3426 ctx
->default_tcg_memop_mask
);
3427 gen_store_gpr(t0
, rt
);
3431 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3432 gen_store_gpr(t0
, rt
);
3435 t1
= tcg_temp_new();
3436 /* Do a byte access to possibly trigger a page
3437 fault with the unaligned address. */
3438 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3439 tcg_gen_andi_tl(t1
, t0
, 7);
3440 #ifndef TARGET_WORDS_BIGENDIAN
3441 tcg_gen_xori_tl(t1
, t1
, 7);
3443 tcg_gen_shli_tl(t1
, t1
, 3);
3444 tcg_gen_andi_tl(t0
, t0
, ~7);
3445 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3446 tcg_gen_shl_tl(t0
, t0
, t1
);
3447 t2
= tcg_const_tl(-1);
3448 tcg_gen_shl_tl(t2
, t2
, t1
);
3449 gen_load_gpr(t1
, rt
);
3450 tcg_gen_andc_tl(t1
, t1
, t2
);
3452 tcg_gen_or_tl(t0
, t0
, t1
);
3454 gen_store_gpr(t0
, rt
);
3457 t1
= tcg_temp_new();
3458 /* Do a byte access to possibly trigger a page
3459 fault with the unaligned address. */
3460 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3461 tcg_gen_andi_tl(t1
, t0
, 7);
3462 #ifdef TARGET_WORDS_BIGENDIAN
3463 tcg_gen_xori_tl(t1
, t1
, 7);
3465 tcg_gen_shli_tl(t1
, t1
, 3);
3466 tcg_gen_andi_tl(t0
, t0
, ~7);
3467 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3468 tcg_gen_shr_tl(t0
, t0
, t1
);
3469 tcg_gen_xori_tl(t1
, t1
, 63);
3470 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3471 tcg_gen_shl_tl(t2
, t2
, t1
);
3472 gen_load_gpr(t1
, rt
);
3473 tcg_gen_and_tl(t1
, t1
, t2
);
3475 tcg_gen_or_tl(t0
, t0
, t1
);
3477 gen_store_gpr(t0
, rt
);
3480 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3481 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3483 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3484 gen_store_gpr(t0
, rt
);
3488 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3489 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3491 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3492 gen_store_gpr(t0
, rt
);
3495 mem_idx
= MIPS_HFLAG_UM
;
3498 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3499 ctx
->default_tcg_memop_mask
);
3500 gen_store_gpr(t0
, rt
);
3503 mem_idx
= MIPS_HFLAG_UM
;
3506 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3507 ctx
->default_tcg_memop_mask
);
3508 gen_store_gpr(t0
, rt
);
3511 mem_idx
= MIPS_HFLAG_UM
;
3514 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3515 ctx
->default_tcg_memop_mask
);
3516 gen_store_gpr(t0
, rt
);
3519 mem_idx
= MIPS_HFLAG_UM
;
3522 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3523 gen_store_gpr(t0
, rt
);
3526 mem_idx
= MIPS_HFLAG_UM
;
3529 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3530 gen_store_gpr(t0
, rt
);
3533 mem_idx
= MIPS_HFLAG_UM
;
3536 t1
= tcg_temp_new();
3537 /* Do a byte access to possibly trigger a page
3538 fault with the unaligned address. */
3539 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3540 tcg_gen_andi_tl(t1
, t0
, 3);
3541 #ifndef TARGET_WORDS_BIGENDIAN
3542 tcg_gen_xori_tl(t1
, t1
, 3);
3544 tcg_gen_shli_tl(t1
, t1
, 3);
3545 tcg_gen_andi_tl(t0
, t0
, ~3);
3546 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3547 tcg_gen_shl_tl(t0
, t0
, t1
);
3548 t2
= tcg_const_tl(-1);
3549 tcg_gen_shl_tl(t2
, t2
, t1
);
3550 gen_load_gpr(t1
, rt
);
3551 tcg_gen_andc_tl(t1
, t1
, t2
);
3553 tcg_gen_or_tl(t0
, t0
, t1
);
3555 tcg_gen_ext32s_tl(t0
, t0
);
3556 gen_store_gpr(t0
, rt
);
3559 mem_idx
= MIPS_HFLAG_UM
;
3562 t1
= tcg_temp_new();
3563 /* Do a byte access to possibly trigger a page
3564 fault with the unaligned address. */
3565 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3566 tcg_gen_andi_tl(t1
, t0
, 3);
3567 #ifdef TARGET_WORDS_BIGENDIAN
3568 tcg_gen_xori_tl(t1
, t1
, 3);
3570 tcg_gen_shli_tl(t1
, t1
, 3);
3571 tcg_gen_andi_tl(t0
, t0
, ~3);
3572 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3573 tcg_gen_shr_tl(t0
, t0
, t1
);
3574 tcg_gen_xori_tl(t1
, t1
, 31);
3575 t2
= tcg_const_tl(0xfffffffeull
);
3576 tcg_gen_shl_tl(t2
, t2
, t1
);
3577 gen_load_gpr(t1
, rt
);
3578 tcg_gen_and_tl(t1
, t1
, t2
);
3580 tcg_gen_or_tl(t0
, t0
, t1
);
3582 tcg_gen_ext32s_tl(t0
, t0
);
3583 gen_store_gpr(t0
, rt
);
3586 mem_idx
= MIPS_HFLAG_UM
;
3590 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3591 gen_store_gpr(t0
, rt
);
3597 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3598 uint32_t reg1
, uint32_t reg2
)
3600 TCGv taddr
= tcg_temp_new();
3601 TCGv_i64 tval
= tcg_temp_new_i64();
3602 TCGv tmp1
= tcg_temp_new();
3603 TCGv tmp2
= tcg_temp_new();
3605 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3606 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3607 #ifdef TARGET_WORDS_BIGENDIAN
3608 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3610 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3612 gen_store_gpr(tmp1
, reg1
);
3613 tcg_temp_free(tmp1
);
3614 gen_store_gpr(tmp2
, reg2
);
3615 tcg_temp_free(tmp2
);
3616 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3617 tcg_temp_free_i64(tval
);
3618 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3619 tcg_temp_free(taddr
);
3623 static void gen_st(DisasContext
*ctx
, uint32_t opc
, int rt
,
3624 int base
, int offset
)
3626 TCGv t0
= tcg_temp_new();
3627 TCGv t1
= tcg_temp_new();
3628 int mem_idx
= ctx
->mem_idx
;
3630 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3631 gen_load_gpr(t1
, rt
);
3633 #if defined(TARGET_MIPS64)
3635 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3636 ctx
->default_tcg_memop_mask
);
3639 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3642 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3646 mem_idx
= MIPS_HFLAG_UM
;
3649 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3650 ctx
->default_tcg_memop_mask
);
3653 mem_idx
= MIPS_HFLAG_UM
;
3656 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3657 ctx
->default_tcg_memop_mask
);
3660 mem_idx
= MIPS_HFLAG_UM
;
3663 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3666 mem_idx
= MIPS_HFLAG_UM
;
3669 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3672 mem_idx
= MIPS_HFLAG_UM
;
3675 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3683 /* Store conditional */
3684 static void gen_st_cond(DisasContext
*ctx
, int rt
, int base
, int offset
,
3685 TCGMemOp tcg_mo
, bool eva
)
3688 TCGLabel
*l1
= gen_new_label();
3689 TCGLabel
*done
= gen_new_label();
3691 t0
= tcg_temp_new();
3692 addr
= tcg_temp_new();
3693 /* compare the address against that of the preceeding LL */
3694 gen_base_offset_addr(ctx
, addr
, base
, offset
);
3695 tcg_gen_brcond_tl(TCG_COND_EQ
, addr
, cpu_lladdr
, l1
);
3696 tcg_temp_free(addr
);
3697 tcg_gen_movi_tl(t0
, 0);
3698 gen_store_gpr(t0
, rt
);
3702 /* generate cmpxchg */
3703 val
= tcg_temp_new();
3704 gen_load_gpr(val
, rt
);
3705 tcg_gen_atomic_cmpxchg_tl(t0
, cpu_lladdr
, cpu_llval
, val
,
3706 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, tcg_mo
);
3707 tcg_gen_setcond_tl(TCG_COND_EQ
, t0
, t0
, cpu_llval
);
3708 gen_store_gpr(t0
, rt
);
3711 gen_set_label(done
);
3716 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3717 uint32_t reg1
, uint32_t reg2
, bool eva
)
3719 TCGv taddr
= tcg_temp_local_new();
3720 TCGv lladdr
= tcg_temp_local_new();
3721 TCGv_i64 tval
= tcg_temp_new_i64();
3722 TCGv_i64 llval
= tcg_temp_new_i64();
3723 TCGv_i64 val
= tcg_temp_new_i64();
3724 TCGv tmp1
= tcg_temp_new();
3725 TCGv tmp2
= tcg_temp_new();
3726 TCGLabel
*lab_fail
= gen_new_label();
3727 TCGLabel
*lab_done
= gen_new_label();
3729 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3731 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3732 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3734 gen_load_gpr(tmp1
, reg1
);
3735 gen_load_gpr(tmp2
, reg2
);
3737 #ifdef TARGET_WORDS_BIGENDIAN
3738 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3740 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3743 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3744 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3745 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, MO_64
);
3747 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3749 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3751 gen_set_label(lab_fail
);
3754 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3756 gen_set_label(lab_done
);
3757 tcg_gen_movi_tl(lladdr
, -1);
3758 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3761 /* Load and store */
3762 static void gen_flt_ldst(DisasContext
*ctx
, uint32_t opc
, int ft
,
3765 /* Don't do NOP if destination is zero: we must perform the actual
3770 TCGv_i32 fp0
= tcg_temp_new_i32();
3771 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3772 ctx
->default_tcg_memop_mask
);
3773 gen_store_fpr32(ctx
, fp0
, ft
);
3774 tcg_temp_free_i32(fp0
);
3779 TCGv_i32 fp0
= tcg_temp_new_i32();
3780 gen_load_fpr32(ctx
, fp0
, ft
);
3781 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3782 ctx
->default_tcg_memop_mask
);
3783 tcg_temp_free_i32(fp0
);
3788 TCGv_i64 fp0
= tcg_temp_new_i64();
3789 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3790 ctx
->default_tcg_memop_mask
);
3791 gen_store_fpr64(ctx
, fp0
, ft
);
3792 tcg_temp_free_i64(fp0
);
3797 TCGv_i64 fp0
= tcg_temp_new_i64();
3798 gen_load_fpr64(ctx
, fp0
, ft
);
3799 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3800 ctx
->default_tcg_memop_mask
);
3801 tcg_temp_free_i64(fp0
);
3805 MIPS_INVAL("flt_ldst");
3806 generate_exception_end(ctx
, EXCP_RI
);
3811 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3812 int rs
, int16_t imm
)
3814 TCGv t0
= tcg_temp_new();
3816 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3817 check_cp1_enabled(ctx
);
3821 check_insn(ctx
, ISA_MIPS2
);
3824 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3825 gen_flt_ldst(ctx
, op
, rt
, t0
);
3828 generate_exception_err(ctx
, EXCP_CpU
, 1);
3833 /* Arithmetic with immediate operand */
3834 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3835 int rt
, int rs
, int imm
)
3837 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3839 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3840 /* If no destination, treat it as a NOP.
3841 For addi, we must generate the overflow exception when needed. */
3847 TCGv t0
= tcg_temp_local_new();
3848 TCGv t1
= tcg_temp_new();
3849 TCGv t2
= tcg_temp_new();
3850 TCGLabel
*l1
= gen_new_label();
3852 gen_load_gpr(t1
, rs
);
3853 tcg_gen_addi_tl(t0
, t1
, uimm
);
3854 tcg_gen_ext32s_tl(t0
, t0
);
3856 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3857 tcg_gen_xori_tl(t2
, t0
, uimm
);
3858 tcg_gen_and_tl(t1
, t1
, t2
);
3860 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3862 /* operands of same sign, result different sign */
3863 generate_exception(ctx
, EXCP_OVERFLOW
);
3865 tcg_gen_ext32s_tl(t0
, t0
);
3866 gen_store_gpr(t0
, rt
);
3872 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3873 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3875 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3878 #if defined(TARGET_MIPS64)
3881 TCGv t0
= tcg_temp_local_new();
3882 TCGv t1
= tcg_temp_new();
3883 TCGv t2
= tcg_temp_new();
3884 TCGLabel
*l1
= gen_new_label();
3886 gen_load_gpr(t1
, rs
);
3887 tcg_gen_addi_tl(t0
, t1
, uimm
);
3889 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3890 tcg_gen_xori_tl(t2
, t0
, uimm
);
3891 tcg_gen_and_tl(t1
, t1
, t2
);
3893 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3895 /* operands of same sign, result different sign */
3896 generate_exception(ctx
, EXCP_OVERFLOW
);
3898 gen_store_gpr(t0
, rt
);
3904 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3906 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3913 /* Logic with immediate operand */
3914 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3915 int rt
, int rs
, int16_t imm
)
3920 /* If no destination, treat it as a NOP. */
3923 uimm
= (uint16_t)imm
;
3926 if (likely(rs
!= 0)) {
3927 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3929 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3934 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3936 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3940 if (likely(rs
!= 0)) {
3941 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3943 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3947 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3949 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3950 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3952 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3961 /* Set on less than with immediate operand */
3962 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3963 int rt
, int rs
, int16_t imm
)
3965 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3969 /* If no destination, treat it as a NOP. */
3972 t0
= tcg_temp_new();
3973 gen_load_gpr(t0
, rs
);
3976 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3979 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3985 /* Shifts with immediate operand */
/*
 * Shifts with an immediate shift amount.
 * Writes GPR rt = (rs SHIFT-OP uimm); the 32-bit variants sign-extend the
 * 32-bit result to the full register width (tcg_gen_ext32s_tl), matching
 * MIPS64 semantics for 32-bit operations.
 * NOTE(review): the switch/case skeleton selecting the opcode variant is
 * elided from this extraction; per-case comments below are inferred from
 * the visible TCG op sequences — confirm against the full source.
 */
3986 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3987 int rt
, int rs
, int16_t imm
)
/* 32-bit shifts use only the low 5 bits of the immediate. */
3989 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3993 /* If no destination, treat it as a NOP. */
3997 t0
= tcg_temp_new();
3998 gen_load_gpr(t0
, rs
);
/* Shift-left-logical (32-bit): shift, then sign-extend the result. */
4001 tcg_gen_shli_tl(t0
, t0
, uimm
);
4002 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
/* Shift-right-arithmetic (32-bit). */
4005 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
/* Shift-right-logical (32-bit): zero-extend input before shifting so no
   high garbage leaks into the result. */
4009 tcg_gen_ext32u_tl(t0
, t0
);
4010 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
/* uimm == 0 case: result is just the sign-extended source. */
4012 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
/* Rotate-right (32-bit): done on an i32 temp, then sign-extended back. */
4017 TCGv_i32 t1
= tcg_temp_new_i32();
4019 tcg_gen_trunc_tl_i32(t1
, t0
);
4020 tcg_gen_rotri_i32(t1
, t1
, uimm
);
4021 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
4022 tcg_temp_free_i32(t1
);
/* Rotate by 0: plain sign-extension of the source. */
4024 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4027 #if defined(TARGET_MIPS64)
/* 64-bit doubleword shifts: full-width, no sign-extension step needed. */
4029 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
4032 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4035 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4039 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
/* Rotate by 0: straight move. */
4041 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
/* "+32" variants — presumably the DSLL32/DSRA32/DSRL32/DROTR32 opcodes,
   whose effective shift amount is uimm + 32 (TODO confirm: case labels
   are elided from this extraction). */
4045 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4048 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4051 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4054 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4062 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
4063 int rd
, int rs
, int rt
)
4065 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
4066 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
4067 /* If no destination, treat it as a NOP.
4068 For add & sub, we must generate the overflow exception when needed. */
4075 TCGv t0
= tcg_temp_local_new();
4076 TCGv t1
= tcg_temp_new();
4077 TCGv t2
= tcg_temp_new();
4078 TCGLabel
*l1
= gen_new_label();
4080 gen_load_gpr(t1
, rs
);
4081 gen_load_gpr(t2
, rt
);
4082 tcg_gen_add_tl(t0
, t1
, t2
);
4083 tcg_gen_ext32s_tl(t0
, t0
);
4084 tcg_gen_xor_tl(t1
, t1
, t2
);
4085 tcg_gen_xor_tl(t2
, t0
, t2
);
4086 tcg_gen_andc_tl(t1
, t2
, t1
);
4088 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4090 /* operands of same sign, result different sign */
4091 generate_exception(ctx
, EXCP_OVERFLOW
);
4093 gen_store_gpr(t0
, rd
);
4098 if (rs
!= 0 && rt
!= 0) {
4099 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4100 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4101 } else if (rs
== 0 && rt
!= 0) {
4102 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4103 } else if (rs
!= 0 && rt
== 0) {
4104 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4106 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4111 TCGv t0
= tcg_temp_local_new();
4112 TCGv t1
= tcg_temp_new();
4113 TCGv t2
= tcg_temp_new();
4114 TCGLabel
*l1
= gen_new_label();
4116 gen_load_gpr(t1
, rs
);
4117 gen_load_gpr(t2
, rt
);
4118 tcg_gen_sub_tl(t0
, t1
, t2
);
4119 tcg_gen_ext32s_tl(t0
, t0
);
4120 tcg_gen_xor_tl(t2
, t1
, t2
);
4121 tcg_gen_xor_tl(t1
, t0
, t1
);
4122 tcg_gen_and_tl(t1
, t1
, t2
);
4124 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4126 /* operands of different sign, first operand and result different sign */
4127 generate_exception(ctx
, EXCP_OVERFLOW
);
4129 gen_store_gpr(t0
, rd
);
4134 if (rs
!= 0 && rt
!= 0) {
4135 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4136 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4137 } else if (rs
== 0 && rt
!= 0) {
4138 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4139 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4140 } else if (rs
!= 0 && rt
== 0) {
4141 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4143 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4146 #if defined(TARGET_MIPS64)
4149 TCGv t0
= tcg_temp_local_new();
4150 TCGv t1
= tcg_temp_new();
4151 TCGv t2
= tcg_temp_new();
4152 TCGLabel
*l1
= gen_new_label();
4154 gen_load_gpr(t1
, rs
);
4155 gen_load_gpr(t2
, rt
);
4156 tcg_gen_add_tl(t0
, t1
, t2
);
4157 tcg_gen_xor_tl(t1
, t1
, t2
);
4158 tcg_gen_xor_tl(t2
, t0
, t2
);
4159 tcg_gen_andc_tl(t1
, t2
, t1
);
4161 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4163 /* operands of same sign, result different sign */
4164 generate_exception(ctx
, EXCP_OVERFLOW
);
4166 gen_store_gpr(t0
, rd
);
4171 if (rs
!= 0 && rt
!= 0) {
4172 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4173 } else if (rs
== 0 && rt
!= 0) {
4174 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4175 } else if (rs
!= 0 && rt
== 0) {
4176 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4178 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4183 TCGv t0
= tcg_temp_local_new();
4184 TCGv t1
= tcg_temp_new();
4185 TCGv t2
= tcg_temp_new();
4186 TCGLabel
*l1
= gen_new_label();
4188 gen_load_gpr(t1
, rs
);
4189 gen_load_gpr(t2
, rt
);
4190 tcg_gen_sub_tl(t0
, t1
, t2
);
4191 tcg_gen_xor_tl(t2
, t1
, t2
);
4192 tcg_gen_xor_tl(t1
, t0
, t1
);
4193 tcg_gen_and_tl(t1
, t1
, t2
);
4195 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4197 /* operands of different sign, first operand and result different sign */
4198 generate_exception(ctx
, EXCP_OVERFLOW
);
4200 gen_store_gpr(t0
, rd
);
4205 if (rs
!= 0 && rt
!= 0) {
4206 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4207 } else if (rs
== 0 && rt
!= 0) {
4208 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4209 } else if (rs
!= 0 && rt
== 0) {
4210 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4212 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4217 if (likely(rs
!= 0 && rt
!= 0)) {
4218 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4219 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4221 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4227 /* Conditional move */
/*
 * Conditional move: rd is written depending on whether GPR rt is zero.
 * Four tcg_gen_movcond_tl forms are visible below; the first two keep the
 * old rd value when the condition fails, the last two write 0 instead
 * (t1 is the constant 0). Presumably these correspond to MOVN/MOVZ and
 * the R6 SELNEZ/SELEQZ selects respectively — TODO confirm: the switch
 * and case labels are elided from this extraction.
 */
4228 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4229 int rd
, int rs
, int rt
)
4234 /* If no destination, treat it as a NOP. */
/* t0 = rt (the condition operand), t1 = 0, t2 = rs (the value to move). */
4238 t0
= tcg_temp_new();
4239 gen_load_gpr(t0
, rt
);
4240 t1
= tcg_const_tl(0);
4241 t2
= tcg_temp_new();
4242 gen_load_gpr(t2
, rs
);
/* rd = (rt != 0) ? rs : rd  — old rd preserved on false. */
4245 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
/* rd = (rt == 0) ? rs : rd  — old rd preserved on false. */
4248 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
/* rd = (rt != 0) ? rs : 0  — zero written on false. */
4251 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
/* rd = (rt == 0) ? rs : 0  — zero written on false. */
4254 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4263 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4264 int rd
, int rs
, int rt
)
4267 /* If no destination, treat it as a NOP. */
4273 if (likely(rs
!= 0 && rt
!= 0)) {
4274 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4276 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4280 if (rs
!= 0 && rt
!= 0) {
4281 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4282 } else if (rs
== 0 && rt
!= 0) {
4283 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4284 } else if (rs
!= 0 && rt
== 0) {
4285 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4287 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4291 if (likely(rs
!= 0 && rt
!= 0)) {
4292 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4293 } else if (rs
== 0 && rt
!= 0) {
4294 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4295 } else if (rs
!= 0 && rt
== 0) {
4296 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4298 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4302 if (likely(rs
!= 0 && rt
!= 0)) {
4303 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4304 } else if (rs
== 0 && rt
!= 0) {
4305 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4306 } else if (rs
!= 0 && rt
== 0) {
4307 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4309 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4315 /* Set on lower than */
/*
 * Register-register set-on-less-than (SLT / SLTU).
 * Writes GPR rd = (rs < rt) as 0/1, signed or unsigned depending on the
 * opcode. NOTE(review): the switch on opc and the temp-free/epilogue
 * lines are elided from this extraction.
 */
4316 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4317 int rd
, int rs
, int rt
)
4322 /* If no destination, treat it as a NOP. */
/* Load both operands into scratch temporaries. */
4326 t0
= tcg_temp_new();
4327 t1
= tcg_temp_new();
4328 gen_load_gpr(t0
, rs
);
4329 gen_load_gpr(t1
, rt
);
/* Signed compare: rd = (signed)rs < (signed)rt. */
4332 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
/* Unsigned compare: rd = (unsigned)rs < (unsigned)rt. */
4335 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4343 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4344 int rd
, int rs
, int rt
)
4349 /* If no destination, treat it as a NOP.
4350 For add & sub, we must generate the overflow exception when needed. */
4354 t0
= tcg_temp_new();
4355 t1
= tcg_temp_new();
4356 gen_load_gpr(t0
, rs
);
4357 gen_load_gpr(t1
, rt
);
4360 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4361 tcg_gen_shl_tl(t0
, t1
, t0
);
4362 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4365 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4366 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4369 tcg_gen_ext32u_tl(t1
, t1
);
4370 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4371 tcg_gen_shr_tl(t0
, t1
, t0
);
4372 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4376 TCGv_i32 t2
= tcg_temp_new_i32();
4377 TCGv_i32 t3
= tcg_temp_new_i32();
4379 tcg_gen_trunc_tl_i32(t2
, t0
);
4380 tcg_gen_trunc_tl_i32(t3
, t1
);
4381 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4382 tcg_gen_rotr_i32(t2
, t3
, t2
);
4383 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4384 tcg_temp_free_i32(t2
);
4385 tcg_temp_free_i32(t3
);
4388 #if defined(TARGET_MIPS64)
4390 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4391 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4394 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4395 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4398 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4399 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4402 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4403 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
4411 #if defined(TARGET_MIPS64)
4412 /* Copy GPR to and from TX79 HI1/LO1 register. */
/*
 * Copy a GPR to/from the TX79 (R5900) secondary HI1/LO1 accumulator pair
 * (cpu_HI[1] / cpu_LO[1]).
 * NOTE(review): the switch/case skeleton on opc is elided from this
 * extraction; the move directions below are inferred from each visible
 * tcg op's operand order.
 */
4413 static void gen_HILO1_tx79(DisasContext
*ctx
, uint32_t opc
, int reg
)
/* Moving HI1/LO1 into the zero register is a NOP — bail out early. */
4415 if (reg
== 0 && (opc
== MMI_OPC_MFHI1
|| opc
== MMI_OPC_MFLO1
)) {
/* MFHI1: reg <- HI1. */
4422 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[1]);
/* MFLO1: reg <- LO1. */
4425 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[1]);
/* MTHI1: HI1 <- reg; writing from $zero stores the constant 0. */
4429 tcg_gen_mov_tl(cpu_HI
[1], cpu_gpr
[reg
]);
4431 tcg_gen_movi_tl(cpu_HI
[1], 0);
/* MTLO1: LO1 <- reg; writing from $zero stores the constant 0. */
4436 tcg_gen_mov_tl(cpu_LO
[1], cpu_gpr
[reg
]);
4438 tcg_gen_movi_tl(cpu_LO
[1], 0);
/* Unknown opcode: raise a Reserved Instruction exception. */
4442 MIPS_INVAL("mfthilo1 TX79");
4443 generate_exception_end(ctx
, EXCP_RI
);
4449 /* Arithmetic on HI/LO registers */
/*
 * Move between a GPR and the HI/LO accumulator pair selected by 'acc'
 * (MFHI/MFLO/MTHI/MTLO and their DSP-accumulator forms).
 * On TARGET_MIPS64 the 32-bit-view paths sign-extend with
 * tcg_gen_ext32s_tl; otherwise a plain move is used.
 * NOTE(review): the switch/case skeleton, the acc-selection logic, and
 * the #else/#endif lines pairing with each visible #if are elided from
 * this extraction.
 */
4450 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
/* MFHI/MFLO into the zero register is a NOP — bail out early. */
4452 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
/* MFHI: reg <- HI[acc] (sign-extended 32-bit view on MIPS64). */
4463 #if defined(TARGET_MIPS64)
4465 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4469 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
/* MFLO: reg <- LO[acc]. */
4473 #if defined(TARGET_MIPS64)
4475 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4479 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
/* MTHI: HI[acc] <- reg; writing from $zero stores the constant 0. */
4484 #if defined(TARGET_MIPS64)
4486 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4490 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4493 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
/* MTLO: LO[acc] <- reg; writing from $zero stores the constant 0. */
4498 #if defined(TARGET_MIPS64)
4500 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4504 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4507 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
/*
 * R6 PC-relative load helper: loads from the absolute address 'addr'
 * (already computed by the caller) with memory index 'memidx' and stores
 * the result into GPR 'reg'.
 * NOTE(review): the final 'memop' parameter and the function's braces are
 * elided from this extraction — the signature shown here is truncated.
 */
4513 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
/* Materialize the target address as a TCG constant; t0 doubles as both
   address and loaded-value temporary. */
4516 TCGv t0
= tcg_const_tl(addr
);
4517 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
/* Store into the destination GPR (store to $zero is discarded by
   gen_store_gpr's usual contract — TODO confirm, helper not in view). */
4518 gen_store_gpr(t0
, reg
);
4522 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4528 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4531 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4532 addr
= addr_add(ctx
, pc
, offset
);
4533 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4537 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4538 addr
= addr_add(ctx
, pc
, offset
);
4539 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4541 #if defined(TARGET_MIPS64)
4544 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4545 addr
= addr_add(ctx
, pc
, offset
);
4546 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4550 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4553 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4554 addr
= addr_add(ctx
, pc
, offset
);
4555 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4560 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4561 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4562 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4565 #if defined(TARGET_MIPS64)
4566 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4567 case R6_OPC_LDPC
+ (1 << 16):
4568 case R6_OPC_LDPC
+ (2 << 16):
4569 case R6_OPC_LDPC
+ (3 << 16):
4571 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4572 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4573 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4577 MIPS_INVAL("OPC_PCREL");
4578 generate_exception_end(ctx
, EXCP_RI
);
4585 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4594 t0
= tcg_temp_new();
4595 t1
= tcg_temp_new();
4597 gen_load_gpr(t0
, rs
);
4598 gen_load_gpr(t1
, rt
);
4603 TCGv t2
= tcg_temp_new();
4604 TCGv t3
= tcg_temp_new();
4605 tcg_gen_ext32s_tl(t0
, t0
);
4606 tcg_gen_ext32s_tl(t1
, t1
);
4607 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4608 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4609 tcg_gen_and_tl(t2
, t2
, t3
);
4610 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4611 tcg_gen_or_tl(t2
, t2
, t3
);
4612 tcg_gen_movi_tl(t3
, 0);
4613 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4614 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4615 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4622 TCGv t2
= tcg_temp_new();
4623 TCGv t3
= tcg_temp_new();
4624 tcg_gen_ext32s_tl(t0
, t0
);
4625 tcg_gen_ext32s_tl(t1
, t1
);
4626 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4627 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4628 tcg_gen_and_tl(t2
, t2
, t3
);
4629 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4630 tcg_gen_or_tl(t2
, t2
, t3
);
4631 tcg_gen_movi_tl(t3
, 0);
4632 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4633 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4634 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4641 TCGv t2
= tcg_const_tl(0);
4642 TCGv t3
= tcg_const_tl(1);
4643 tcg_gen_ext32u_tl(t0
, t0
);
4644 tcg_gen_ext32u_tl(t1
, t1
);
4645 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4646 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4647 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4654 TCGv t2
= tcg_const_tl(0);
4655 TCGv t3
= tcg_const_tl(1);
4656 tcg_gen_ext32u_tl(t0
, t0
);
4657 tcg_gen_ext32u_tl(t1
, t1
);
4658 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4659 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4660 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4667 TCGv_i32 t2
= tcg_temp_new_i32();
4668 TCGv_i32 t3
= tcg_temp_new_i32();
4669 tcg_gen_trunc_tl_i32(t2
, t0
);
4670 tcg_gen_trunc_tl_i32(t3
, t1
);
4671 tcg_gen_mul_i32(t2
, t2
, t3
);
4672 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4673 tcg_temp_free_i32(t2
);
4674 tcg_temp_free_i32(t3
);
4679 TCGv_i32 t2
= tcg_temp_new_i32();
4680 TCGv_i32 t3
= tcg_temp_new_i32();
4681 tcg_gen_trunc_tl_i32(t2
, t0
);
4682 tcg_gen_trunc_tl_i32(t3
, t1
);
4683 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4684 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4685 tcg_temp_free_i32(t2
);
4686 tcg_temp_free_i32(t3
);
4691 TCGv_i32 t2
= tcg_temp_new_i32();
4692 TCGv_i32 t3
= tcg_temp_new_i32();
4693 tcg_gen_trunc_tl_i32(t2
, t0
);
4694 tcg_gen_trunc_tl_i32(t3
, t1
);
4695 tcg_gen_mul_i32(t2
, t2
, t3
);
4696 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4697 tcg_temp_free_i32(t2
);
4698 tcg_temp_free_i32(t3
);
4703 TCGv_i32 t2
= tcg_temp_new_i32();
4704 TCGv_i32 t3
= tcg_temp_new_i32();
4705 tcg_gen_trunc_tl_i32(t2
, t0
);
4706 tcg_gen_trunc_tl_i32(t3
, t1
);
4707 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4708 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4709 tcg_temp_free_i32(t2
);
4710 tcg_temp_free_i32(t3
);
4713 #if defined(TARGET_MIPS64)
4716 TCGv t2
= tcg_temp_new();
4717 TCGv t3
= tcg_temp_new();
4718 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4719 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4720 tcg_gen_and_tl(t2
, t2
, t3
);
4721 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4722 tcg_gen_or_tl(t2
, t2
, t3
);
4723 tcg_gen_movi_tl(t3
, 0);
4724 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4725 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4732 TCGv t2
= tcg_temp_new();
4733 TCGv t3
= tcg_temp_new();
4734 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4735 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4736 tcg_gen_and_tl(t2
, t2
, t3
);
4737 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4738 tcg_gen_or_tl(t2
, t2
, t3
);
4739 tcg_gen_movi_tl(t3
, 0);
4740 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4741 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4748 TCGv t2
= tcg_const_tl(0);
4749 TCGv t3
= tcg_const_tl(1);
4750 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4751 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4758 TCGv t2
= tcg_const_tl(0);
4759 TCGv t3
= tcg_const_tl(1);
4760 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4761 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4767 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4771 TCGv t2
= tcg_temp_new();
4772 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4777 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4781 TCGv t2
= tcg_temp_new();
4782 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4788 MIPS_INVAL("r6 mul/div");
4789 generate_exception_end(ctx
, EXCP_RI
);
4797 #if defined(TARGET_MIPS64)
4798 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
4802 t0
= tcg_temp_new();
4803 t1
= tcg_temp_new();
4805 gen_load_gpr(t0
, rs
);
4806 gen_load_gpr(t1
, rt
);
4811 TCGv t2
= tcg_temp_new();
4812 TCGv t3
= tcg_temp_new();
4813 tcg_gen_ext32s_tl(t0
, t0
);
4814 tcg_gen_ext32s_tl(t1
, t1
);
4815 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4816 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4817 tcg_gen_and_tl(t2
, t2
, t3
);
4818 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4819 tcg_gen_or_tl(t2
, t2
, t3
);
4820 tcg_gen_movi_tl(t3
, 0);
4821 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4822 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4823 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4824 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4825 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4832 TCGv t2
= tcg_const_tl(0);
4833 TCGv t3
= tcg_const_tl(1);
4834 tcg_gen_ext32u_tl(t0
, t0
);
4835 tcg_gen_ext32u_tl(t1
, t1
);
4836 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4837 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4838 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4839 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4840 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4846 MIPS_INVAL("div1 TX79");
4847 generate_exception_end(ctx
, EXCP_RI
);
4856 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4857 int acc
, int rs
, int rt
)
4861 t0
= tcg_temp_new();
4862 t1
= tcg_temp_new();
4864 gen_load_gpr(t0
, rs
);
4865 gen_load_gpr(t1
, rt
);
4874 TCGv t2
= tcg_temp_new();
4875 TCGv t3
= tcg_temp_new();
4876 tcg_gen_ext32s_tl(t0
, t0
);
4877 tcg_gen_ext32s_tl(t1
, t1
);
4878 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4879 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4880 tcg_gen_and_tl(t2
, t2
, t3
);
4881 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4882 tcg_gen_or_tl(t2
, t2
, t3
);
4883 tcg_gen_movi_tl(t3
, 0);
4884 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4885 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4886 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4887 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4888 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4895 TCGv t2
= tcg_const_tl(0);
4896 TCGv t3
= tcg_const_tl(1);
4897 tcg_gen_ext32u_tl(t0
, t0
);
4898 tcg_gen_ext32u_tl(t1
, t1
);
4899 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4900 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4901 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4902 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4903 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4910 TCGv_i32 t2
= tcg_temp_new_i32();
4911 TCGv_i32 t3
= tcg_temp_new_i32();
4912 tcg_gen_trunc_tl_i32(t2
, t0
);
4913 tcg_gen_trunc_tl_i32(t3
, t1
);
4914 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4915 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4916 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4917 tcg_temp_free_i32(t2
);
4918 tcg_temp_free_i32(t3
);
4923 TCGv_i32 t2
= tcg_temp_new_i32();
4924 TCGv_i32 t3
= tcg_temp_new_i32();
4925 tcg_gen_trunc_tl_i32(t2
, t0
);
4926 tcg_gen_trunc_tl_i32(t3
, t1
);
4927 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4928 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4929 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4930 tcg_temp_free_i32(t2
);
4931 tcg_temp_free_i32(t3
);
4934 #if defined(TARGET_MIPS64)
4937 TCGv t2
= tcg_temp_new();
4938 TCGv t3
= tcg_temp_new();
4939 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4940 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4941 tcg_gen_and_tl(t2
, t2
, t3
);
4942 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4943 tcg_gen_or_tl(t2
, t2
, t3
);
4944 tcg_gen_movi_tl(t3
, 0);
4945 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4946 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4947 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4954 TCGv t2
= tcg_const_tl(0);
4955 TCGv t3
= tcg_const_tl(1);
4956 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4957 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4958 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4964 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4967 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4972 TCGv_i64 t2
= tcg_temp_new_i64();
4973 TCGv_i64 t3
= tcg_temp_new_i64();
4975 tcg_gen_ext_tl_i64(t2
, t0
);
4976 tcg_gen_ext_tl_i64(t3
, t1
);
4977 tcg_gen_mul_i64(t2
, t2
, t3
);
4978 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4979 tcg_gen_add_i64(t2
, t2
, t3
);
4980 tcg_temp_free_i64(t3
);
4981 gen_move_low32(cpu_LO
[acc
], t2
);
4982 gen_move_high32(cpu_HI
[acc
], t2
);
4983 tcg_temp_free_i64(t2
);
4988 TCGv_i64 t2
= tcg_temp_new_i64();
4989 TCGv_i64 t3
= tcg_temp_new_i64();
4991 tcg_gen_ext32u_tl(t0
, t0
);
4992 tcg_gen_ext32u_tl(t1
, t1
);
4993 tcg_gen_extu_tl_i64(t2
, t0
);
4994 tcg_gen_extu_tl_i64(t3
, t1
);
4995 tcg_gen_mul_i64(t2
, t2
, t3
);
4996 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4997 tcg_gen_add_i64(t2
, t2
, t3
);
4998 tcg_temp_free_i64(t3
);
4999 gen_move_low32(cpu_LO
[acc
], t2
);
5000 gen_move_high32(cpu_HI
[acc
], t2
);
5001 tcg_temp_free_i64(t2
);
5006 TCGv_i64 t2
= tcg_temp_new_i64();
5007 TCGv_i64 t3
= tcg_temp_new_i64();
5009 tcg_gen_ext_tl_i64(t2
, t0
);
5010 tcg_gen_ext_tl_i64(t3
, t1
);
5011 tcg_gen_mul_i64(t2
, t2
, t3
);
5012 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5013 tcg_gen_sub_i64(t2
, t3
, t2
);
5014 tcg_temp_free_i64(t3
);
5015 gen_move_low32(cpu_LO
[acc
], t2
);
5016 gen_move_high32(cpu_HI
[acc
], t2
);
5017 tcg_temp_free_i64(t2
);
5022 TCGv_i64 t2
= tcg_temp_new_i64();
5023 TCGv_i64 t3
= tcg_temp_new_i64();
5025 tcg_gen_ext32u_tl(t0
, t0
);
5026 tcg_gen_ext32u_tl(t1
, t1
);
5027 tcg_gen_extu_tl_i64(t2
, t0
);
5028 tcg_gen_extu_tl_i64(t3
, t1
);
5029 tcg_gen_mul_i64(t2
, t2
, t3
);
5030 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5031 tcg_gen_sub_i64(t2
, t3
, t2
);
5032 tcg_temp_free_i64(t3
);
5033 gen_move_low32(cpu_LO
[acc
], t2
);
5034 gen_move_high32(cpu_HI
[acc
], t2
);
5035 tcg_temp_free_i64(t2
);
5039 MIPS_INVAL("mul/div");
5040 generate_exception_end(ctx
, EXCP_RI
);
5049 * These MULT[U] and MADD[U] instructions implemented in for example
5050 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
5051 * architectures are special three-operand variants with the syntax
5053 * MULT[U][1] rd, rs, rt
5057 * (rd, LO, HI) <- rs * rt
5061 * MADD[U][1] rd, rs, rt
5065 * (rd, LO, HI) <- (LO, HI) + rs * rt
5067 * where the low-order 32-bits of the result is placed into both the
5068 * GPR rd and the special register LO. The high-order 32-bits of the
5069 * result is placed into the special register HI.
5071 * If the GPR rd is omitted in assembly language, it is taken to be 0,
5072 * which is the zero register that always reads as 0.
5074 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
5075 int rd
, int rs
, int rt
)
5077 TCGv t0
= tcg_temp_new();
5078 TCGv t1
= tcg_temp_new();
5081 gen_load_gpr(t0
, rs
);
5082 gen_load_gpr(t1
, rt
);
5090 TCGv_i32 t2
= tcg_temp_new_i32();
5091 TCGv_i32 t3
= tcg_temp_new_i32();
5092 tcg_gen_trunc_tl_i32(t2
, t0
);
5093 tcg_gen_trunc_tl_i32(t3
, t1
);
5094 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5096 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5098 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5099 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5100 tcg_temp_free_i32(t2
);
5101 tcg_temp_free_i32(t3
);
5104 case MMI_OPC_MULTU1
:
5109 TCGv_i32 t2
= tcg_temp_new_i32();
5110 TCGv_i32 t3
= tcg_temp_new_i32();
5111 tcg_gen_trunc_tl_i32(t2
, t0
);
5112 tcg_gen_trunc_tl_i32(t3
, t1
);
5113 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5115 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5117 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5118 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5119 tcg_temp_free_i32(t2
);
5120 tcg_temp_free_i32(t3
);
5128 TCGv_i64 t2
= tcg_temp_new_i64();
5129 TCGv_i64 t3
= tcg_temp_new_i64();
5131 tcg_gen_ext_tl_i64(t2
, t0
);
5132 tcg_gen_ext_tl_i64(t3
, t1
);
5133 tcg_gen_mul_i64(t2
, t2
, t3
);
5134 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5135 tcg_gen_add_i64(t2
, t2
, t3
);
5136 tcg_temp_free_i64(t3
);
5137 gen_move_low32(cpu_LO
[acc
], t2
);
5138 gen_move_high32(cpu_HI
[acc
], t2
);
5140 gen_move_low32(cpu_gpr
[rd
], t2
);
5142 tcg_temp_free_i64(t2
);
5145 case MMI_OPC_MADDU1
:
5150 TCGv_i64 t2
= tcg_temp_new_i64();
5151 TCGv_i64 t3
= tcg_temp_new_i64();
5153 tcg_gen_ext32u_tl(t0
, t0
);
5154 tcg_gen_ext32u_tl(t1
, t1
);
5155 tcg_gen_extu_tl_i64(t2
, t0
);
5156 tcg_gen_extu_tl_i64(t3
, t1
);
5157 tcg_gen_mul_i64(t2
, t2
, t3
);
5158 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5159 tcg_gen_add_i64(t2
, t2
, t3
);
5160 tcg_temp_free_i64(t3
);
5161 gen_move_low32(cpu_LO
[acc
], t2
);
5162 gen_move_high32(cpu_HI
[acc
], t2
);
5164 gen_move_low32(cpu_gpr
[rd
], t2
);
5166 tcg_temp_free_i64(t2
);
5170 MIPS_INVAL("mul/madd TXx9");
5171 generate_exception_end(ctx
, EXCP_RI
);
5180 static void gen_mul_vr54xx(DisasContext
*ctx
, uint32_t opc
,
5181 int rd
, int rs
, int rt
)
5183 TCGv t0
= tcg_temp_new();
5184 TCGv t1
= tcg_temp_new();
5186 gen_load_gpr(t0
, rs
);
5187 gen_load_gpr(t1
, rt
);
5190 case OPC_VR54XX_MULS
:
5191 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5193 case OPC_VR54XX_MULSU
:
5194 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5196 case OPC_VR54XX_MACC
:
5197 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5199 case OPC_VR54XX_MACCU
:
5200 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5202 case OPC_VR54XX_MSAC
:
5203 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5205 case OPC_VR54XX_MSACU
:
5206 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5208 case OPC_VR54XX_MULHI
:
5209 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5211 case OPC_VR54XX_MULHIU
:
5212 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5214 case OPC_VR54XX_MULSHI
:
5215 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5217 case OPC_VR54XX_MULSHIU
:
5218 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5220 case OPC_VR54XX_MACCHI
:
5221 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5223 case OPC_VR54XX_MACCHIU
:
5224 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5226 case OPC_VR54XX_MSACHI
:
5227 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5229 case OPC_VR54XX_MSACHIU
:
5230 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5233 MIPS_INVAL("mul vr54xx");
5234 generate_exception_end(ctx
, EXCP_RI
);
5237 gen_store_gpr(t0
, rd
);
5244 static void gen_cl(DisasContext
*ctx
, uint32_t opc
,
5254 gen_load_gpr(t0
, rs
);
5259 #if defined(TARGET_MIPS64)
5263 tcg_gen_not_tl(t0
, t0
);
5272 tcg_gen_ext32u_tl(t0
, t0
);
5273 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5274 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5276 #if defined(TARGET_MIPS64)
5281 tcg_gen_clzi_i64(t0
, t0
, 64);
5287 /* Godson integer instructions */
5288 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5289 int rd
, int rs
, int rt
)
5301 case OPC_MULTU_G_2E
:
5302 case OPC_MULTU_G_2F
:
5303 #if defined(TARGET_MIPS64)
5304 case OPC_DMULT_G_2E
:
5305 case OPC_DMULT_G_2F
:
5306 case OPC_DMULTU_G_2E
:
5307 case OPC_DMULTU_G_2F
:
5309 t0
= tcg_temp_new();
5310 t1
= tcg_temp_new();
5313 t0
= tcg_temp_local_new();
5314 t1
= tcg_temp_local_new();
5318 gen_load_gpr(t0
, rs
);
5319 gen_load_gpr(t1
, rt
);
5324 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5325 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5327 case OPC_MULTU_G_2E
:
5328 case OPC_MULTU_G_2F
:
5329 tcg_gen_ext32u_tl(t0
, t0
);
5330 tcg_gen_ext32u_tl(t1
, t1
);
5331 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5332 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5337 TCGLabel
*l1
= gen_new_label();
5338 TCGLabel
*l2
= gen_new_label();
5339 TCGLabel
*l3
= gen_new_label();
5340 tcg_gen_ext32s_tl(t0
, t0
);
5341 tcg_gen_ext32s_tl(t1
, t1
);
5342 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5343 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5346 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5347 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5348 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5351 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5352 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5359 TCGLabel
*l1
= gen_new_label();
5360 TCGLabel
*l2
= gen_new_label();
5361 tcg_gen_ext32u_tl(t0
, t0
);
5362 tcg_gen_ext32u_tl(t1
, t1
);
5363 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5364 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5367 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5368 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5375 TCGLabel
*l1
= gen_new_label();
5376 TCGLabel
*l2
= gen_new_label();
5377 TCGLabel
*l3
= gen_new_label();
5378 tcg_gen_ext32u_tl(t0
, t0
);
5379 tcg_gen_ext32u_tl(t1
, t1
);
5380 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5381 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5382 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5384 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5387 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5388 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5395 TCGLabel
*l1
= gen_new_label();
5396 TCGLabel
*l2
= gen_new_label();
5397 tcg_gen_ext32u_tl(t0
, t0
);
5398 tcg_gen_ext32u_tl(t1
, t1
);
5399 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5400 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5403 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5404 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5408 #if defined(TARGET_MIPS64)
5409 case OPC_DMULT_G_2E
:
5410 case OPC_DMULT_G_2F
:
5411 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5413 case OPC_DMULTU_G_2E
:
5414 case OPC_DMULTU_G_2F
:
5415 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5420 TCGLabel
*l1
= gen_new_label();
5421 TCGLabel
*l2
= gen_new_label();
5422 TCGLabel
*l3
= gen_new_label();
5423 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5424 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5427 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5428 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5429 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5432 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5436 case OPC_DDIVU_G_2E
:
5437 case OPC_DDIVU_G_2F
:
5439 TCGLabel
*l1
= gen_new_label();
5440 TCGLabel
*l2
= gen_new_label();
5441 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5442 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5445 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5452 TCGLabel
*l1
= gen_new_label();
5453 TCGLabel
*l2
= gen_new_label();
5454 TCGLabel
*l3
= gen_new_label();
5455 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5456 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5457 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5459 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5462 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5466 case OPC_DMODU_G_2E
:
5467 case OPC_DMODU_G_2F
:
5469 TCGLabel
*l1
= gen_new_label();
5470 TCGLabel
*l2
= gen_new_label();
5471 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5472 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5475 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
/*
 * NOTE(review): this block is a garbled extraction — the original file's
 * line numbers (e.g. "5487") are fused into the text and the switch
 * skeleton (switch(opc) { ... }, break statements, closing braces) was
 * dropped.  Comments below annotate the surviving statements; recover the
 * full body from upstream QEMU target/mips/translate.c before building.
 */
5486 /* Loongson multimedia instructions */
/*
 * Decode and emit TCG for one Loongson 2E/2F multimedia (SIMD-in-FPR)
 * instruction.  Operands live in 64-bit FP registers rs/rt; the result
 * is written back to FP register rd.
 */
5487 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5489 uint32_t opc
, shift_max
;
5492 opc
= MASK_LMI(ctx
->opcode
);
/*
 * NOTE(review): the branchful ADD/SUB overflow cases below need local
 * temps that survive a brcond, hence tcg_temp_local_new_i64() on this
 * path; the straight-line path uses plain temps — the selecting
 * switch/if was lost in extraction.
 */
5498 t0
= tcg_temp_local_new_i64();
5499 t1
= tcg_temp_local_new_i64();
5502 t0
= tcg_temp_new_i64();
5503 t1
= tcg_temp_new_i64();
/* Operands are FPRs, so CP1 must be usable; then load both inputs. */
5507 check_cp1_enabled(ctx
);
5508 gen_load_fpr64(ctx
, t0
, rs
);
5509 gen_load_fpr64(ctx
, t1
, rt
);
/* Case-generator macros: helper call vs. direct TCG op per opcode. */
5511 #define LMI_HELPER(UP, LO) \
5512 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5513 #define LMI_HELPER_1(UP, LO) \
5514 case OPC_##UP: gen_helper_##LO(t0, t0); break
5515 #define LMI_DIRECT(UP, LO, OP) \
5516 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
/* Packed saturating/wrapping add variants. */
5519 LMI_HELPER(PADDSH
, paddsh
);
5520 LMI_HELPER(PADDUSH
, paddush
);
5521 LMI_HELPER(PADDH
, paddh
);
5522 LMI_HELPER(PADDW
, paddw
);
5523 LMI_HELPER(PADDSB
, paddsb
);
5524 LMI_HELPER(PADDUSB
, paddusb
);
5525 LMI_HELPER(PADDB
, paddb
);
/* Packed subtract variants. */
5527 LMI_HELPER(PSUBSH
, psubsh
);
5528 LMI_HELPER(PSUBUSH
, psubush
);
5529 LMI_HELPER(PSUBH
, psubh
);
5530 LMI_HELPER(PSUBW
, psubw
);
5531 LMI_HELPER(PSUBSB
, psubsb
);
5532 LMI_HELPER(PSUBUSB
, psubusb
);
5533 LMI_HELPER(PSUBB
, psubb
);
/* Shuffle / pack. */
5535 LMI_HELPER(PSHUFH
, pshufh
);
5536 LMI_HELPER(PACKSSWH
, packsswh
);
5537 LMI_HELPER(PACKSSHB
, packsshb
);
5538 LMI_HELPER(PACKUSHB
, packushb
);
/* Unpack/interleave low and high lanes. */
5540 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5541 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5542 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5543 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5544 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5545 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
/* Average / min-max. */
5547 LMI_HELPER(PAVGH
, pavgh
);
5548 LMI_HELPER(PAVGB
, pavgb
);
5549 LMI_HELPER(PMAXSH
, pmaxsh
);
5550 LMI_HELPER(PMINSH
, pminsh
);
5551 LMI_HELPER(PMAXUB
, pmaxub
);
5552 LMI_HELPER(PMINUB
, pminub
);
/* Packed compares produce lane-wise all-ones/all-zeros masks. */
5554 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5555 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5556 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5557 LMI_HELPER(PCMPGTH
, pcmpgth
);
5558 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5559 LMI_HELPER(PCMPGTB
, pcmpgtb
);
/* Packed shifts with per-lane semantics handled in helpers. */
5561 LMI_HELPER(PSLLW
, psllw
);
5562 LMI_HELPER(PSLLH
, psllh
);
5563 LMI_HELPER(PSRLW
, psrlw
);
5564 LMI_HELPER(PSRLH
, psrlh
);
5565 LMI_HELPER(PSRAW
, psraw
);
5566 LMI_HELPER(PSRAH
, psrah
);
/* Packed multiplies / multiply-add. */
5568 LMI_HELPER(PMULLH
, pmullh
);
5569 LMI_HELPER(PMULHH
, pmulhh
);
5570 LMI_HELPER(PMULHUH
, pmulhuh
);
5571 LMI_HELPER(PMADDHW
, pmaddhw
);
5573 LMI_HELPER(PASUBUB
, pasubub
);
5574 LMI_HELPER_1(BIADD
, biadd
);
5575 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
/* 64-bit lane ops that map 1:1 onto TCG integer ops. */
5577 LMI_DIRECT(PADDD
, paddd
, add
);
5578 LMI_DIRECT(PSUBD
, psubd
, sub
);
5579 LMI_DIRECT(XOR_CP2
, xor, xor);
5580 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5581 LMI_DIRECT(AND_CP2
, and, and);
5582 LMI_DIRECT(OR_CP2
, or, or);
/* PANDN: t0 = t1 & ~t0 (note the swapped operand order). */
5585 tcg_gen_andc_i64(t0
, t1
, t0
);
/* PINSRH_0..3: insert the low halfword of t1 at halfword lane 0..3. */
5589 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5592 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5595 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5598 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
/* PEXTRH: select halfword lane (t1 & 3) and zero-extend it. */
5602 tcg_gen_andi_i64(t1
, t1
, 3);
5603 tcg_gen_shli_i64(t1
, t1
, 4);
5604 tcg_gen_shr_i64(t0
, t0
, t1
);
5605 tcg_gen_ext16u_i64(t0
, t0
);
/* ADDU_CP2 / SUBU_CP2: 32-bit wrapping arithmetic, sign-extended. */
5609 tcg_gen_add_i64(t0
, t0
, t1
);
5610 tcg_gen_ext32s_i64(t0
, t0
);
5613 tcg_gen_sub_i64(t0
, t0
, t1
);
5614 tcg_gen_ext32s_i64(t0
, t0
);
/*
 * Shift group: shift_max is 32 or 64 depending on the S/D form
 * (the assignments were lost in extraction).
 */
5636 /* Make sure shift count isn't TCG undefined behaviour. */
5637 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5642 tcg_gen_shl_i64(t0
, t0
, t1
);
5646 /* Since SRA is UndefinedResult without sign-extended inputs,
5647 we can treat SRA and DSRA the same. */
5648 tcg_gen_sar_i64(t0
, t0
, t1
);
5651 /* We want to shift in zeros for SRL; zero-extend first. */
5652 tcg_gen_ext32u_i64(t0
, t0
);
5655 tcg_gen_shr_i64(t0
, t0
, t1
);
5659 if (shift_max
== 32) {
5660 tcg_gen_ext32s_i64(t0
, t0
);
5663 /* Shifts larger than MAX produce zero. */
5664 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5665 tcg_gen_neg_i64(t1
, t1
);
5666 tcg_gen_and_i64(t0
, t0
, t1
);
/*
 * ADD_CP2/DADD_CP2: trapping add.  Overflow iff operands share sign
 * but the result's sign differs: ~(t1^t2) & (t2^t0) has the sign bit
 * set on overflow, which the brcond below turns into EXCP_OVERFLOW.
 */
5672 TCGv_i64 t2
= tcg_temp_new_i64();
5673 TCGLabel
*lab
= gen_new_label();
5675 tcg_gen_mov_i64(t2
, t0
);
5676 tcg_gen_add_i64(t0
, t1
, t2
);
5677 if (opc
== OPC_ADD_CP2
) {
5678 tcg_gen_ext32s_i64(t0
, t0
);
5680 tcg_gen_xor_i64(t1
, t1
, t2
);
5681 tcg_gen_xor_i64(t2
, t2
, t0
);
5682 tcg_gen_andc_i64(t1
, t2
, t1
);
5683 tcg_temp_free_i64(t2
);
5684 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5685 generate_exception(ctx
, EXCP_OVERFLOW
);
/* SUB_CP2/DSUB_CP2: trapping subtract, analogous overflow test. */
5693 TCGv_i64 t2
= tcg_temp_new_i64();
5694 TCGLabel
*lab
= gen_new_label();
5696 tcg_gen_mov_i64(t2
, t0
);
5697 tcg_gen_sub_i64(t0
, t1
, t2
);
5698 if (opc
== OPC_SUB_CP2
) {
5699 tcg_gen_ext32s_i64(t0
, t0
);
5701 tcg_gen_xor_i64(t1
, t1
, t2
);
5702 tcg_gen_xor_i64(t2
, t2
, t0
);
5703 tcg_gen_and_i64(t1
, t1
, t2
);
5704 tcg_temp_free_i64(t2
);
5705 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5706 generate_exception(ctx
, EXCP_OVERFLOW
);
/* PMULUW: 32x32 -> 64 unsigned multiply. */
5712 tcg_gen_ext32u_i64(t0
, t0
);
5713 tcg_gen_ext32u_i64(t1
, t1
);
5714 tcg_gen_mul_i64(t0
, t0
, t1
);
5723 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
5724 FD field is the CC field? */
/* Unknown LMI opcode: reserved instruction exception. */
5726 MIPS_INVAL("loongson_cp2");
5727 generate_exception_end(ctx
, EXCP_RI
);
/* Write the result back to the destination FPR and release temps. */
5734 gen_store_fpr64(ctx
, t0
, rd
);
5736 tcg_temp_free_i64(t0
);
5737 tcg_temp_free_i64(t1
);
/*
 * NOTE(review): garbled extraction — file line numbers fused into the
 * text and the switch skeleton/breaks/braces dropped.  Emit TCG for the
 * trap instructions (TEQ/TNE/TGE/TLT and their immediate forms): raise
 * EXCP_TRAP when the condition holds, otherwise fall through.
 */
5741 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5742 int rs
, int rt
, int16_t imm
)
5745 TCGv t0
= tcg_temp_new();
5746 TCGv t1
= tcg_temp_new();
5749 /* Load needed operands */
5757 /* Compare two registers */
5759 gen_load_gpr(t0
, rs
);
5760 gen_load_gpr(t1
, rt
);
5770 /* Compare register to immediate */
/* rs == 0 && imm == 0 means comparing r0 with 0: statically decidable. */
5771 if (rs
!= 0 || imm
!= 0) {
5772 gen_load_gpr(t0
, rs
);
5773 tcg_gen_movi_tl(t1
, (int32_t)imm
);
/*
 * Statically-true conditions (same register compared with itself, or
 * r0 vs 0): trap unconditionally.
 */
5780 case OPC_TEQ
: /* rs == rs */
5781 case OPC_TEQI
: /* r0 == 0 */
5782 case OPC_TGE
: /* rs >= rs */
5783 case OPC_TGEI
: /* r0 >= 0 */
5784 case OPC_TGEU
: /* rs >= rs unsigned */
5785 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5787 generate_exception_end(ctx
, EXCP_TRAP
);
/* Statically-false conditions: never trap, treat as NOP. */
5789 case OPC_TLT
: /* rs < rs */
5790 case OPC_TLTI
: /* r0 < 0 */
5791 case OPC_TLTU
: /* rs < rs unsigned */
5792 case OPC_TLTIU
: /* r0 < 0 unsigned */
5793 case OPC_TNE
: /* rs != rs */
5794 case OPC_TNEI
: /* r0 != 0 */
5795 /* Never trap: treat as NOP. */
/*
 * Dynamic case: branch over the trap when the *inverse* condition
 * holds (e.g. for TEQ, skip the trap when t0 != t1).
 */
5799 TCGLabel
*l1
= gen_new_label();
5804 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5808 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5812 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5816 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5820 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5824 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
/* Condition held: raise the trap exception. */
5827 generate_exception(ctx
, EXCP_TRAP
);
5834 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5836 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5840 #ifndef CONFIG_USER_ONLY
5841 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5847 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5849 if (use_goto_tb(ctx
, dest
)) {
5852 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5855 if (ctx
->base
.singlestep_enabled
) {
5856 save_cpu_state(ctx
, 0);
5857 gen_helper_raise_exception_debug(cpu_env
);
5859 tcg_gen_lookup_and_goto_ptr();
/*
 * NOTE(review): garbled extraction — fused line numbers, dropped switch
 * skeleton.  Emit the condition/target computation for a branch or jump
 * *before* its delay slot is translated; the actual control transfer
 * happens later, driven by the MIPS_HFLAG_B* flags set here.
 */
5863 /* Branches (before delay slot) */
5864 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5866 int rs
, int rt
, int32_t offset
,
5869 target_ulong btgt
= -1;
5871 int bcond_compute
= 0;
5872 TCGv t0
= tcg_temp_new();
5873 TCGv t1
= tcg_temp_new();
/* A branch inside a delay/forbidden slot is a reserved instruction. */
5875 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5876 #ifdef MIPS_DEBUG_DISAS
5877 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5878 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5880 generate_exception_end(ctx
, EXCP_RI
);
5884 /* Load needed operands */
5890 /* Compare two registers */
5892 gen_load_gpr(t0
, rs
);
5893 gen_load_gpr(t1
, rt
);
/* PC-relative target: address of the instruction after this one + offset. */
5896 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5910 /* Compare to zero */
5912 gen_load_gpr(t0
, rs
);
5915 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
/* BPOSGE: compare the DSPControl pos field (7 bits on MIPS64, 6 on 32). */
5918 #if defined(TARGET_MIPS64)
5920 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5922 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5925 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5930 /* Jump to immediate */
/* J/JAL target: 256MB region of the delay-slot PC, OR'd with the index. */
5931 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5936 /* Jump to register */
5937 if (offset
!= 0 && offset
!= 16) {
5938 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5939 others are reserved. */
5940 MIPS_INVAL("jump hint");
5941 generate_exception_end(ctx
, EXCP_RI
);
5944 gen_load_gpr(btarget
, rs
);
5947 MIPS_INVAL("branch/jump");
5948 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Statically-decided branches: the register forms below compare a
 * register with itself (or r0 with 0), so taken-ness is known at
 * translation time.
 */
5951 if (bcond_compute
== 0) {
5952 /* No condition to be computed */
5954 case OPC_BEQ
: /* rx == rx */
5955 case OPC_BEQL
: /* rx == rx likely */
5956 case OPC_BGEZ
: /* 0 >= 0 */
5957 case OPC_BGEZL
: /* 0 >= 0 likely */
5958 case OPC_BLEZ
: /* 0 <= 0 */
5959 case OPC_BLEZL
: /* 0 <= 0 likely */
5961 ctx
->hflags
|= MIPS_HFLAG_B
;
5963 case OPC_BGEZAL
: /* 0 >= 0 */
5964 case OPC_BGEZALL
: /* 0 >= 0 likely */
5965 /* Always take and link */
5967 ctx
->hflags
|= MIPS_HFLAG_B
;
/* Statically never-taken forms. */
5969 case OPC_BNE
: /* rx != rx */
5970 case OPC_BGTZ
: /* 0 > 0 */
5971 case OPC_BLTZ
: /* 0 < 0 */
5974 case OPC_BLTZAL
: /* 0 < 0 */
5975 /* Handle as an unconditional branch to get correct delay
5978 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5979 ctx
->hflags
|= MIPS_HFLAG_B
;
/* Never-taken "likely" forms still link, then skip the delay slot. */
5981 case OPC_BLTZALL
: /* 0 < 0 likely */
5982 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5983 /* Skip the instruction in the delay slot */
5984 ctx
->base
.pc_next
+= 4;
5986 case OPC_BNEL
: /* rx != rx likely */
5987 case OPC_BGTZL
: /* 0 > 0 likely */
5988 case OPC_BLTZL
: /* 0 < 0 likely */
5989 /* Skip the instruction in the delay slot */
5990 ctx
->base
.pc_next
+= 4;
/* Unconditional jump flavours: B (direct), BX (ISA switch), BR (register). */
5993 ctx
->hflags
|= MIPS_HFLAG_B
;
5996 ctx
->hflags
|= MIPS_HFLAG_BX
;
6000 ctx
->hflags
|= MIPS_HFLAG_B
;
6003 ctx
->hflags
|= MIPS_HFLAG_BR
;
6007 ctx
->hflags
|= MIPS_HFLAG_BR
;
6010 MIPS_INVAL("branch/jump");
6011 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Dynamically-computed conditions: materialise the predicate into
 * `bcond`; the normal and "likely" variants share each comparison
 * (hence the duplicated setcond lines).
 */
6017 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6020 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6023 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6026 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6029 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6032 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6035 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6039 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6043 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6046 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6049 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6052 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6055 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6058 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
/* BPOSGE32/BPOSGE64: DSPControl pos >= 32 (or 64 on MIPS64). */
6061 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6063 #if defined(TARGET_MIPS64)
6065 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6069 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6072 ctx
->hflags
|= MIPS_HFLAG_BC
;
6075 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6078 ctx
->hflags
|= MIPS_HFLAG_BL
;
6081 MIPS_INVAL("conditional branch/jump");
6082 generate_exception_end(ctx
, EXCP_RI
);
/* Record the target and the delay-slot size for the slot translator. */
6087 ctx
->btarget
= btgt
;
6089 switch (delayslot_size
) {
6091 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6094 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
/*
 * Link register write: return address is past the delay slot; lowbit
 * marks MIPS16/microMIPS mode in bit 0 of the saved address.
 */
6099 int post_delay
= insn_bytes
+ delayslot_size
;
6100 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6102 tcg_gen_movi_tl(cpu_gpr
[blink
],
6103 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6107 if (insn_bytes
== 2) {
6108 ctx
->hflags
|= MIPS_HFLAG_B16
;
/*
 * NOTE(review): garbled extraction — fused line numbers, dropped switch
 * skeleton.  nanoMIPS variant of gen_compute_branch: nanoMIPS branches
 * have no delay slot, so only the condition/target bookkeeping differs.
 */
6115 /* nanoMIPS Branches */
6116 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
6118 int rs
, int rt
, int32_t offset
)
6120 target_ulong btgt
= -1;
6121 int bcond_compute
= 0;
6122 TCGv t0
= tcg_temp_new();
6123 TCGv t1
= tcg_temp_new();
6125 /* Load needed operands */
6129 /* Compare two registers */
6131 gen_load_gpr(t0
, rs
);
6132 gen_load_gpr(t1
, rt
);
6135 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6138 /* Compare to zero */
6140 gen_load_gpr(t0
, rs
);
6143 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
/* BPOSGE32: read the DSPControl pos field. */
6146 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
6148 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6152 /* Jump to register */
6153 if (offset
!= 0 && offset
!= 16) {
6154 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
6155 others are reserved. */
6156 MIPS_INVAL("jump hint");
6157 generate_exception_end(ctx
, EXCP_RI
);
6160 gen_load_gpr(btarget
, rs
);
6163 MIPS_INVAL("branch/jump");
6164 generate_exception_end(ctx
, EXCP_RI
);
/* Statically-decided branches (register compared with itself). */
6167 if (bcond_compute
== 0) {
6168 /* No condition to be computed */
6170 case OPC_BEQ
: /* rx == rx */
6172 ctx
->hflags
|= MIPS_HFLAG_B
;
6174 case OPC_BGEZAL
: /* 0 >= 0 */
6175 /* Always take and link */
6176 tcg_gen_movi_tl(cpu_gpr
[31],
6177 ctx
->base
.pc_next
+ insn_bytes
);
6178 ctx
->hflags
|= MIPS_HFLAG_B
;
/* BNE rx,rx: never taken; still links and skips the slot word. */
6180 case OPC_BNE
: /* rx != rx */
6181 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6182 /* Skip the instruction in the delay slot */
6183 ctx
->base
.pc_next
+= 4;
/* Register jumps; JALR writes the return address into rt. */
6186 ctx
->hflags
|= MIPS_HFLAG_BR
;
6190 tcg_gen_movi_tl(cpu_gpr
[rt
],
6191 ctx
->base
.pc_next
+ insn_bytes
);
6193 ctx
->hflags
|= MIPS_HFLAG_BR
;
6196 MIPS_INVAL("branch/jump");
6197 generate_exception_end(ctx
, EXCP_RI
);
/* Dynamic conditions: materialise the predicate into `bcond`. */
6203 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6206 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6209 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6210 tcg_gen_movi_tl(cpu_gpr
[31],
6211 ctx
->base
.pc_next
+ insn_bytes
);
6214 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6216 ctx
->hflags
|= MIPS_HFLAG_BC
;
6219 MIPS_INVAL("conditional branch/jump");
6220 generate_exception_end(ctx
, EXCP_RI
);
/* Record the branch target; flag 16-bit encodings for the epilogue. */
6225 ctx
->btarget
= btgt
;
6228 if (insn_bytes
== 2) {
6229 ctx
->hflags
|= MIPS_HFLAG_B16
;
/*
 * NOTE(review): garbled extraction — fused line numbers, dropped switch
 * skeleton.  EXT/INS family (and their MIPS64 D-variants): extract a
 * bitfield [lsb, lsb+msb] from rs, or insert rs into rt's bitfield
 * [lsb, msb], writing the result to rt.
 */
6236 /* special3 bitfield operations */
6237 static void gen_bitops(DisasContext
*ctx
, uint32_t opc
, int rt
,
6238 int rs
, int lsb
, int msb
)
6240 TCGv t0
= tcg_temp_new();
6241 TCGv t1
= tcg_temp_new();
6243 gen_load_gpr(t1
, rs
);
/* 32-bit EXT: fields must stay within bit 31. */
6246 if (lsb
+ msb
> 31) {
6250 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6252 /* The two checks together imply that lsb == 0,
6253 so this is a simple sign-extension. */
6254 tcg_gen_ext32s_tl(t0
, t1
);
/* 64-bit DEXT variants. */
6257 #if defined(TARGET_MIPS64)
6266 if (lsb
+ msb
> 63) {
6269 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
/* 32-bit INS: read-modify-write rt, then sign-extend the result. */
6276 gen_load_gpr(t0
, rt
);
6277 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6278 tcg_gen_ext32s_tl(t0
, t0
);
/* 64-bit DINS variants. */
6280 #if defined(TARGET_MIPS64)
6291 gen_load_gpr(t0
, rt
);
6292 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6297 MIPS_INVAL("bitops");
6298 generate_exception_end(ctx
, EXCP_RI
);
/* Store the extracted/merged value back into rt. */
6303 gen_store_gpr(t0
, rt
);
/*
 * NOTE(review): garbled extraction — fused line numbers, dropped switch
 * skeleton.  Byte/halfword shuffle ops (WSBH, SEB, SEH and MIPS64
 * DSBH/DSHD): swap bytes/halfwords or sign-extend sub-words of rt
 * into rd.
 */
6308 static void gen_bshfl(DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6313 /* If no destination, treat it as a NOP. */
6317 t0
= tcg_temp_new();
6318 gen_load_gpr(t0
, rt
);
/*
 * WSBH: swap the bytes within each halfword.  Classic mask-and-shift:
 * isolate even/odd bytes with 0x00FF00FF, shift each set into the
 * other position, then OR them back together.
 */
6322 TCGv t1
= tcg_temp_new();
6323 TCGv t2
= tcg_const_tl(0x00FF00FF);
6325 tcg_gen_shri_tl(t1
, t0
, 8);
6326 tcg_gen_and_tl(t1
, t1
, t2
);
6327 tcg_gen_and_tl(t0
, t0
, t2
);
6328 tcg_gen_shli_tl(t0
, t0
, 8);
6329 tcg_gen_or_tl(t0
, t0
, t1
);
6332 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
/* SEB / SEH: sign-extend byte / halfword. */
6336 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6339 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
/* DSBH: swap the bytes within each of the four halfwords. */
6341 #if defined(TARGET_MIPS64)
6344 TCGv t1
= tcg_temp_new();
6345 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6347 tcg_gen_shri_tl(t1
, t0
, 8);
6348 tcg_gen_and_tl(t1
, t1
, t2
);
6349 tcg_gen_and_tl(t0
, t0
, t2
);
6350 tcg_gen_shli_tl(t0
, t0
, 8);
6351 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
/*
 * DSHD: reverse the order of the four halfwords — swap halfwords
 * within each word, then swap the two words.
 */
6358 TCGv t1
= tcg_temp_new();
6359 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6361 tcg_gen_shri_tl(t1
, t0
, 16);
6362 tcg_gen_and_tl(t1
, t1
, t2
);
6363 tcg_gen_and_tl(t0
, t0
, t2
);
6364 tcg_gen_shli_tl(t0
, t0
, 16);
6365 tcg_gen_or_tl(t0
, t0
, t1
);
6366 tcg_gen_shri_tl(t1
, t0
, 32);
6367 tcg_gen_shli_tl(t0
, t0
, 32);
6368 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6375 MIPS_INVAL("bsfhl");
6376 generate_exception_end(ctx
, EXCP_RI
);
6383 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6392 t0
= tcg_temp_new();
6393 t1
= tcg_temp_new();
6394 gen_load_gpr(t0
, rs
);
6395 gen_load_gpr(t1
, rt
);
6396 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6397 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6398 if (opc
== OPC_LSA
) {
6399 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
/*
 * NOTE(review): garbled extraction — fused line numbers, dropped
 * parameter line(s), braces and switch skeleton.  Concatenate rs:rt and
 * extract a wordsz-bit window shifted by `bits` bits (ALIGN/DALIGN and
 * the EXT helpers funnel into this).
 */
6408 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6416 t0
= tcg_temp_new();
/* Degenerate shifts: result is just rt (bits == 0) or rs (bits == wordsz). */
6417 if (bits
== 0 || bits
== wordsz
) {
6419 gen_load_gpr(t0
, rt
);
6421 gen_load_gpr(t0
, rs
);
6425 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6427 #if defined(TARGET_MIPS64)
6429 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
/* General case: funnel-shift the rs:rt pair. */
6434 TCGv t1
= tcg_temp_new();
6435 gen_load_gpr(t0
, rt
);
6436 gen_load_gpr(t1
, rs
);
/* 32-bit: build a 64-bit rs:rt value and take the middle 32 bits. */
6440 TCGv_i64 t2
= tcg_temp_new_i64();
6441 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6442 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6443 gen_move_low32(cpu_gpr
[rd
], t2
);
6444 tcg_temp_free_i64(t2
);
/* 64-bit: compose the funnel shift from shl/shr of the two halves. */
6447 #if defined(TARGET_MIPS64)
6449 tcg_gen_shli_tl(t0
, t0
, bits
);
6450 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6451 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6461 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6464 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6467 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6470 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6473 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6480 t0
= tcg_temp_new();
6481 gen_load_gpr(t0
, rt
);
6484 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6486 #if defined(TARGET_MIPS64)
6488 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6495 #ifndef CONFIG_USER_ONLY
6496 /* CP0 (MMU and control) */
6497 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6499 TCGv_i64 t0
= tcg_temp_new_i64();
6500 TCGv_i64 t1
= tcg_temp_new_i64();
6502 tcg_gen_ext_tl_i64(t0
, arg
);
6503 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6504 #if defined(TARGET_MIPS64)
6505 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6507 tcg_gen_concat32_i64(t1
, t1
, t0
);
6509 tcg_gen_st_i64(t1
, cpu_env
, off
);
6510 tcg_temp_free_i64(t1
);
6511 tcg_temp_free_i64(t0
);
6514 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6516 TCGv_i64 t0
= tcg_temp_new_i64();
6517 TCGv_i64 t1
= tcg_temp_new_i64();
6519 tcg_gen_ext_tl_i64(t0
, arg
);
6520 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6521 tcg_gen_concat32_i64(t1
, t1
, t0
);
6522 tcg_gen_st_i64(t1
, cpu_env
, off
);
6523 tcg_temp_free_i64(t1
);
6524 tcg_temp_free_i64(t0
);
6527 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6529 TCGv_i64 t0
= tcg_temp_new_i64();
6531 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6532 #if defined(TARGET_MIPS64)
6533 tcg_gen_shri_i64(t0
, t0
, 30);
6535 tcg_gen_shri_i64(t0
, t0
, 32);
6537 gen_move_low32(arg
, t0
);
6538 tcg_temp_free_i64(t0
);
6541 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6543 TCGv_i64 t0
= tcg_temp_new_i64();
6545 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6546 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6547 gen_move_low32(arg
, t0
);
6548 tcg_temp_free_i64(t0
);
6551 static inline void gen_mfc0_load32(TCGv arg
, target_ulong off
)
6553 TCGv_i32 t0
= tcg_temp_new_i32();
6555 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6556 tcg_gen_ext_i32_tl(arg
, t0
);
6557 tcg_temp_free_i32(t0
);
6560 static inline void gen_mfc0_load64(TCGv arg
, target_ulong off
)
6562 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6563 tcg_gen_ext32s_tl(arg
, arg
);
6566 static inline void gen_mtc0_store32(TCGv arg
, target_ulong off
)
6568 TCGv_i32 t0
= tcg_temp_new_i32();
6570 tcg_gen_trunc_tl_i32(t0
, arg
);
6571 tcg_gen_st_i32(t0
, cpu_env
, off
);
6572 tcg_temp_free_i32(t0
);
/*
 * Feature gate for CP0 register accessors: when condition `c` (a
 * feature-presence test) does not hold, jump to the enclosing
 * function's cp0_unimplemented label.
 *
 * NOTE(review): the extraction dropped the do/if wrapper lines of this
 * macro; restored to the canonical multi-statement-safe form.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
/*
 * NOTE(review): garbled extraction — fused line numbers, dropped switch
 * skeleton.  MFHC0: read the upper 32 bits of a 64-bit CP0 register
 * (reg, sel) into @arg; unimplemented registers log and return 0.
 */
6582 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6584 const char *register_name
= "invalid";
/* EntryLo0 high half — requires large-physical-address support. */
6587 case CP0_REGISTER_02
:
6590 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6591 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6592 register_name
= "EntryLo0";
6595 goto cp0_unimplemented
;
/* EntryLo1 high half. */
6598 case CP0_REGISTER_03
:
6601 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6602 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6603 register_name
= "EntryLo1";
6606 goto cp0_unimplemented
;
/* SAAR high half — requires the SAAR feature. */
6609 case CP0_REGISTER_09
:
6612 CP0_CHECK(ctx
->saar
);
6613 gen_helper_mfhc0_saar(arg
, cpu_env
);
6614 register_name
= "SAAR";
6617 goto cp0_unimplemented
;
/* LLAddr (stored pre-shifted) and MAAR high halves. */
6620 case CP0_REGISTER_17
:
6623 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_LLAddr
),
6624 ctx
->CP0_LLAddr_shift
);
6625 register_name
= "LLAddr";
6628 CP0_CHECK(ctx
->mrp
);
6629 gen_helper_mfhc0_maar(arg
, cpu_env
);
6630 register_name
= "MAAR";
6633 goto cp0_unimplemented
;
/* TagLo high half. */
6636 case CP0_REGISTER_28
:
6642 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6643 register_name
= "TagLo";
6646 goto cp0_unimplemented
;
6650 goto cp0_unimplemented
;
6652 trace_mips_translate_c0("mfhc0", register_name
, reg
, sel
);
/* cp0_unimplemented path: log and read as zero. */
6656 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n",
6657 register_name
, reg
, sel
);
6658 tcg_gen_movi_tl(arg
, 0);
/*
 * NOTE(review): garbled extraction — fused line numbers, dropped switch
 * skeleton.  MTHC0: write @arg into the upper 32 bits of a 64-bit CP0
 * register (reg, sel); unimplemented registers are logged and ignored.
 */
6661 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6663 const char *register_name
= "invalid";
/* Writable high bits are limited by the physical address size (PABITS). */
6664 uint64_t mask
= ctx
->PAMask
>> 36;
/* EntryLo0 high half — requires large-physical-address support. */
6667 case CP0_REGISTER_02
:
6670 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6671 tcg_gen_andi_tl(arg
, arg
, mask
);
6672 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6673 register_name
= "EntryLo0";
6676 goto cp0_unimplemented
;
/* EntryLo1 high half. */
6679 case CP0_REGISTER_03
:
6682 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6683 tcg_gen_andi_tl(arg
, arg
, mask
);
6684 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6685 register_name
= "EntryLo1";
6688 goto cp0_unimplemented
;
/* SAAR high half. */
6691 case CP0_REGISTER_09
:
6694 CP0_CHECK(ctx
->saar
);
6695 gen_helper_mthc0_saar(cpu_env
, arg
);
6696 register_name
= "SAAR";
6699 goto cp0_unimplemented
;
6701 case CP0_REGISTER_17
:
6704 /* LLAddr is read-only (the only exception is bit 0 if LLB is
6705 supported); the CP0_LLAddr_rw_bitmask does not seem to be
6706 relevant for modern MIPS cores supporting MTHC0, therefore
6707 treating MTHC0 to LLAddr as NOP. */
6708 register_name
= "LLAddr";
6711 CP0_CHECK(ctx
->mrp
);
6712 gen_helper_mthc0_maar(cpu_env
, arg
);
6713 register_name
= "MAAR";
6716 goto cp0_unimplemented
;
/* TagLo high half. */
6719 case CP0_REGISTER_28
:
6725 tcg_gen_andi_tl(arg
, arg
, mask
);
6726 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6727 register_name
= "TagLo";
6730 goto cp0_unimplemented
;
6734 goto cp0_unimplemented
;
6736 trace_mips_translate_c0("mthc0", register_name
, reg
, sel
);
/* cp0_unimplemented path: log and discard the write. */
6739 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n",
6740 register_name
, reg
, sel
);
6743 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6745 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
6746 tcg_gen_movi_tl(arg
, 0);
6748 tcg_gen_movi_tl(arg
, ~0);
6752 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6754 const char *register_name
= "invalid";
6757 check_insn(ctx
, ISA_MIPS32
);
6761 case CP0_REGISTER_00
:
6764 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6765 register_name
= "Index";
6768 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6769 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6770 register_name
= "MVPControl";
6773 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6774 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6775 register_name
= "MVPConf0";
6778 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6779 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6780 register_name
= "MVPConf1";
6784 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6785 register_name
= "VPControl";
6788 goto cp0_unimplemented
;
6791 case CP0_REGISTER_01
:
6794 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6795 gen_helper_mfc0_random(arg
, cpu_env
);
6796 register_name
= "Random";
6799 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6800 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6801 register_name
= "VPEControl";
6804 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6805 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6806 register_name
= "VPEConf0";
6809 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6810 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6811 register_name
= "VPEConf1";
6814 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6815 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6816 register_name
= "YQMask";
6819 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6820 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6821 register_name
= "VPESchedule";
6824 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6825 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6826 register_name
= "VPEScheFBack";
6829 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6830 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6831 register_name
= "VPEOpt";
6834 goto cp0_unimplemented
;
6837 case CP0_REGISTER_02
:
6841 TCGv_i64 tmp
= tcg_temp_new_i64();
6842 tcg_gen_ld_i64(tmp
, cpu_env
,
6843 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6844 #if defined(TARGET_MIPS64)
6846 /* Move RI/XI fields to bits 31:30 */
6847 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6848 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6851 gen_move_low32(arg
, tmp
);
6852 tcg_temp_free_i64(tmp
);
6854 register_name
= "EntryLo0";
6857 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6858 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6859 register_name
= "TCStatus";
6862 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6863 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6864 register_name
= "TCBind";
6867 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6868 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6869 register_name
= "TCRestart";
6872 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6873 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6874 register_name
= "TCHalt";
6877 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6878 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6879 register_name
= "TCContext";
6882 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6883 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6884 register_name
= "TCSchedule";
6887 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6888 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6889 register_name
= "TCScheFBack";
6892 goto cp0_unimplemented
;
6895 case CP0_REGISTER_03
:
6899 TCGv_i64 tmp
= tcg_temp_new_i64();
6900 tcg_gen_ld_i64(tmp
, cpu_env
,
6901 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6902 #if defined(TARGET_MIPS64)
6904 /* Move RI/XI fields to bits 31:30 */
6905 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6906 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6909 gen_move_low32(arg
, tmp
);
6910 tcg_temp_free_i64(tmp
);
6912 register_name
= "EntryLo1";
6916 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6917 register_name
= "GlobalNumber";
6920 goto cp0_unimplemented
;
6923 case CP0_REGISTER_04
:
6926 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6927 tcg_gen_ext32s_tl(arg
, arg
);
6928 register_name
= "Context";
6931 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
6932 register_name
= "ContextConfig";
6933 goto cp0_unimplemented
;
6935 CP0_CHECK(ctx
->ulri
);
6936 tcg_gen_ld_tl(arg
, cpu_env
,
6937 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6938 tcg_gen_ext32s_tl(arg
, arg
);
6939 register_name
= "UserLocal";
6942 goto cp0_unimplemented
;
6945 case CP0_REGISTER_05
:
6948 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6949 register_name
= "PageMask";
6952 check_insn(ctx
, ISA_MIPS32R2
);
6953 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6954 register_name
= "PageGrain";
6958 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6959 tcg_gen_ext32s_tl(arg
, arg
);
6960 register_name
= "SegCtl0";
6964 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6965 tcg_gen_ext32s_tl(arg
, arg
);
6966 register_name
= "SegCtl1";
6970 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
6971 tcg_gen_ext32s_tl(arg
, arg
);
6972 register_name
= "SegCtl2";
6976 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6977 register_name
= "PWBase";
6981 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
6982 register_name
= "PWField";
6986 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
6987 register_name
= "PWSize";
6990 goto cp0_unimplemented
;
6993 case CP0_REGISTER_06
:
6996 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6997 register_name
= "Wired";
7000 check_insn(ctx
, ISA_MIPS32R2
);
7001 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7002 register_name
= "SRSConf0";
7005 check_insn(ctx
, ISA_MIPS32R2
);
7006 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7007 register_name
= "SRSConf1";
7010 check_insn(ctx
, ISA_MIPS32R2
);
7011 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7012 register_name
= "SRSConf2";
7015 check_insn(ctx
, ISA_MIPS32R2
);
7016 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7017 register_name
= "SRSConf3";
7020 check_insn(ctx
, ISA_MIPS32R2
);
7021 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7022 register_name
= "SRSConf4";
7026 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7027 register_name
= "PWCtl";
7030 goto cp0_unimplemented
;
7033 case CP0_REGISTER_07
:
7036 check_insn(ctx
, ISA_MIPS32R2
);
7037 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7038 register_name
= "HWREna";
7041 goto cp0_unimplemented
;
7044 case CP0_REGISTER_08
:
7047 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7048 tcg_gen_ext32s_tl(arg
, arg
);
7049 register_name
= "BadVAddr";
7053 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7054 register_name
= "BadInstr";
7058 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7059 register_name
= "BadInstrP";
7063 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7064 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7065 register_name
= "BadInstrX";
7068 goto cp0_unimplemented
;
7071 case CP0_REGISTER_09
:
7074 /* Mark as an IO operation because we read the time. */
7075 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7078 gen_helper_mfc0_count(arg
, cpu_env
);
7079 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7082 /* Break the TB to be able to take timer interrupts immediately
7083 after reading count. DISAS_STOP isn't sufficient, we need to
7084 ensure we break completely out of translated code. */
7085 gen_save_pc(ctx
->base
.pc_next
+ 4);
7086 ctx
->base
.is_jmp
= DISAS_EXIT
;
7087 register_name
= "Count";
7090 CP0_CHECK(ctx
->saar
);
7091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7092 register_name
= "SAARI";
7095 CP0_CHECK(ctx
->saar
);
7096 gen_helper_mfc0_saar(arg
, cpu_env
);
7097 register_name
= "SAAR";
7100 goto cp0_unimplemented
;
7103 case CP0_REGISTER_10
:
7106 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7107 tcg_gen_ext32s_tl(arg
, arg
);
7108 register_name
= "EntryHi";
7111 goto cp0_unimplemented
;
7114 case CP0_REGISTER_11
:
7117 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7118 register_name
= "Compare";
7120 /* 6,7 are implementation dependent */
7122 goto cp0_unimplemented
;
7125 case CP0_REGISTER_12
:
7128 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7129 register_name
= "Status";
7132 check_insn(ctx
, ISA_MIPS32R2
);
7133 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7134 register_name
= "IntCtl";
7137 check_insn(ctx
, ISA_MIPS32R2
);
7138 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7139 register_name
= "SRSCtl";
7142 check_insn(ctx
, ISA_MIPS32R2
);
7143 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7144 register_name
= "SRSMap";
7147 goto cp0_unimplemented
;
7150 case CP0_REGISTER_13
:
7153 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7154 register_name
= "Cause";
7157 goto cp0_unimplemented
;
7160 case CP0_REGISTER_14
:
7163 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7164 tcg_gen_ext32s_tl(arg
, arg
);
7165 register_name
= "EPC";
7168 goto cp0_unimplemented
;
7171 case CP0_REGISTER_15
:
7174 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7175 register_name
= "PRid";
7178 check_insn(ctx
, ISA_MIPS32R2
);
7179 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7180 tcg_gen_ext32s_tl(arg
, arg
);
7181 register_name
= "EBase";
7184 check_insn(ctx
, ISA_MIPS32R2
);
7185 CP0_CHECK(ctx
->cmgcr
);
7186 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7187 tcg_gen_ext32s_tl(arg
, arg
);
7188 register_name
= "CMGCRBase";
7191 goto cp0_unimplemented
;
7194 case CP0_REGISTER_16
:
7197 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7198 register_name
= "Config";
7201 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7202 register_name
= "Config1";
7205 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7206 register_name
= "Config2";
7209 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7210 register_name
= "Config3";
7213 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7214 register_name
= "Config4";
7217 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7218 register_name
= "Config5";
7220 /* 6,7 are implementation dependent */
7222 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7223 register_name
= "Config6";
7226 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7227 register_name
= "Config7";
7230 goto cp0_unimplemented
;
7233 case CP0_REGISTER_17
:
7236 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7237 register_name
= "LLAddr";
7240 CP0_CHECK(ctx
->mrp
);
7241 gen_helper_mfc0_maar(arg
, cpu_env
);
7242 register_name
= "MAAR";
7245 CP0_CHECK(ctx
->mrp
);
7246 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7247 register_name
= "MAARI";
7250 goto cp0_unimplemented
;
7253 case CP0_REGISTER_18
:
7263 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7264 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7265 register_name
= "WatchLo";
7268 goto cp0_unimplemented
;
7271 case CP0_REGISTER_19
:
7281 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7282 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7283 register_name
= "WatchHi";
7286 goto cp0_unimplemented
;
7289 case CP0_REGISTER_20
:
7292 #if defined(TARGET_MIPS64)
7293 check_insn(ctx
, ISA_MIPS3
);
7294 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7295 tcg_gen_ext32s_tl(arg
, arg
);
7296 register_name
= "XContext";
7300 goto cp0_unimplemented
;
7303 case CP0_REGISTER_21
:
7304 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7305 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7308 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7309 register_name
= "Framemask";
7312 goto cp0_unimplemented
;
7315 case CP0_REGISTER_22
:
7316 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7317 register_name
= "'Diagnostic"; /* implementation dependent */
7319 case CP0_REGISTER_23
:
7322 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7323 register_name
= "Debug";
7326 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7327 register_name
= "TraceControl";
7328 goto cp0_unimplemented
;
7330 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7331 register_name
= "TraceControl2";
7332 goto cp0_unimplemented
;
7334 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7335 register_name
= "UserTraceData";
7336 goto cp0_unimplemented
;
7338 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7339 register_name
= "TraceBPC";
7340 goto cp0_unimplemented
;
7342 goto cp0_unimplemented
;
7345 case CP0_REGISTER_24
:
7349 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7350 tcg_gen_ext32s_tl(arg
, arg
);
7351 register_name
= "DEPC";
7354 goto cp0_unimplemented
;
7357 case CP0_REGISTER_25
:
7360 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7361 register_name
= "Performance0";
7364 // gen_helper_mfc0_performance1(arg);
7365 register_name
= "Performance1";
7366 goto cp0_unimplemented
;
7368 // gen_helper_mfc0_performance2(arg);
7369 register_name
= "Performance2";
7370 goto cp0_unimplemented
;
7372 // gen_helper_mfc0_performance3(arg);
7373 register_name
= "Performance3";
7374 goto cp0_unimplemented
;
7376 // gen_helper_mfc0_performance4(arg);
7377 register_name
= "Performance4";
7378 goto cp0_unimplemented
;
7380 // gen_helper_mfc0_performance5(arg);
7381 register_name
= "Performance5";
7382 goto cp0_unimplemented
;
7384 // gen_helper_mfc0_performance6(arg);
7385 register_name
= "Performance6";
7386 goto cp0_unimplemented
;
7388 // gen_helper_mfc0_performance7(arg);
7389 register_name
= "Performance7";
7390 goto cp0_unimplemented
;
7392 goto cp0_unimplemented
;
7395 case CP0_REGISTER_26
:
7398 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7399 register_name
= "ErrCtl";
7402 goto cp0_unimplemented
;
7405 case CP0_REGISTER_27
:
7411 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7412 register_name
= "CacheErr";
7415 goto cp0_unimplemented
;
7418 case CP0_REGISTER_28
:
7425 TCGv_i64 tmp
= tcg_temp_new_i64();
7426 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7427 gen_move_low32(arg
, tmp
);
7428 tcg_temp_free_i64(tmp
);
7430 register_name
= "TagLo";
7436 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7437 register_name
= "DataLo";
7440 goto cp0_unimplemented
;
7443 case CP0_REGISTER_29
:
7449 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7450 register_name
= "TagHi";
7456 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7457 register_name
= "DataHi";
7460 goto cp0_unimplemented
;
7463 case CP0_REGISTER_30
:
7466 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7467 tcg_gen_ext32s_tl(arg
, arg
);
7468 register_name
= "ErrorEPC";
7471 goto cp0_unimplemented
;
7474 case CP0_REGISTER_31
:
7478 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7479 register_name
= "DESAVE";
7487 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7488 tcg_gen_ld_tl(arg
, cpu_env
,
7489 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7490 tcg_gen_ext32s_tl(arg
, arg
);
7491 register_name
= "KScratch";
7494 goto cp0_unimplemented
;
7498 goto cp0_unimplemented
;
7500 trace_mips_translate_c0("mfc0", register_name
, reg
, sel
);
7504 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n",
7505 register_name
, reg
, sel
);
7506 gen_mfc0_unimplemented(ctx
, arg
);
/*
 * gen_mtc0: emit TCG ops implementing MTC0 -- move the 32-bit value in
 * "arg" into coprocessor-0 register "reg", selector "sel".
 *
 * NOTE(review): this region appears to be a damaged extraction of QEMU's
 * target/mips/translate.c.  Logical lines are split across physical
 * lines, the original file's line numbers (e.g. "7509") are fused into
 * the text, and short structural lines (switch/case labels, "break;",
 * braces, short CP0_CHECK()/check_pw() calls) seem to have been dropped
 * by whatever produced this text.  Only comments are added here; every
 * code token is left byte-identical pending recovery of the original.
 * The intended shape -- inferred from the surviving text and the
 * matching gen_mfc0/gen_dmfc0 bodies nearby -- is a switch on "reg"
 * with a nested switch on "sel" per register; confirm against upstream.
 */
7509 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
/* Name reported by the trace point and the LOG_UNIMP message below;
 * stays "invalid" if no case matches. */
7511 const char *register_name
= "invalid";
/* Any sel != 0 form of MTC0 requires at least MIPS32. */
7514 check_insn(ctx
, ISA_MIPS32
);
/* icount mode: writes may perform timed I/O (body of this "if" was
 * presumably gen_io_start() -- dropped by the extraction; confirm). */
7517 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
/* CP0 register 0: Index, plus MVPControl/MVPConf0/MVPConf1/VPControl.
 * The MT-ASE selectors are guarded by CP0_CHECK(ASE_MT); MVPConf0/1
 * and VPControl have no write helper (read-only -- write ignored). */
7522 case CP0_REGISTER_00
:
7525 gen_helper_mtc0_index(cpu_env
, arg
);
7526 register_name
= "Index";
7529 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7530 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7531 register_name
= "MVPControl";
7534 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
/* read-only register: no store emitted */
7536 register_name
= "MVPConf0";
7539 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7541 register_name
= "MVPConf1";
7546 register_name
= "VPControl";
7549 goto cp0_unimplemented
;
/* CP0 register 1: Random (read-only; write ignored) and the MT-ASE
 * VPE control/configuration registers. */
7552 case CP0_REGISTER_01
:
7556 register_name
= "Random";
7559 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7560 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7561 register_name
= "VPEControl";
7564 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7565 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7566 register_name
= "VPEConf0";
7569 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7570 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7571 register_name
= "VPEConf1";
7574 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7575 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7576 register_name
= "YQMask";
7579 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
/* VPESchedule/VPEScheFBack are stored directly into CPUMIPSState --
 * no side effects, so no helper needed. */
7580 tcg_gen_st_tl(arg
, cpu_env
,
7581 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7582 register_name
= "VPESchedule";
7585 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7586 tcg_gen_st_tl(arg
, cpu_env
,
7587 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7588 register_name
= "VPEScheFBack";
7591 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7592 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7593 register_name
= "VPEOpt";
7596 goto cp0_unimplemented
;
/* CP0 register 2: EntryLo0 and the per-TC (MT ASE) registers. */
7599 case CP0_REGISTER_02
:
7602 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7603 register_name
= "EntryLo0";
7606 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7607 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7608 register_name
= "TCStatus";
7611 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7612 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7613 register_name
= "TCBind";
7616 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7617 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7618 register_name
= "TCRestart";
7621 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7622 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7623 register_name
= "TCHalt";
7626 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7627 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7628 register_name
= "TCContext";
7631 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7632 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7633 register_name
= "TCSchedule";
7636 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7637 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7638 register_name
= "TCScheFBack";
7641 goto cp0_unimplemented
;
/* CP0 register 3: EntryLo1; GlobalNumber is read-only (write ignored). */
7644 case CP0_REGISTER_03
:
7647 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7648 register_name
= "EntryLo1";
7653 register_name
= "GlobalNumber";
7656 goto cp0_unimplemented
;
/* CP0 register 4: Context, ContextConfig (unimplemented, helper call
 * commented out upstream), UserLocal (guarded by CP0_CHECK(ctx->ulri)). */
7659 case CP0_REGISTER_04
:
7662 gen_helper_mtc0_context(cpu_env
, arg
);
7663 register_name
= "Context";
7666 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7667 register_name
= "ContextConfig";
7668 goto cp0_unimplemented
;
7670 CP0_CHECK(ctx
->ulri
);
7671 tcg_gen_st_tl(arg
, cpu_env
,
7672 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7673 register_name
= "UserLocal";
7676 goto cp0_unimplemented
;
/* CP0 register 5: PageMask, PageGrain (R2; changes translation -- stop
 * TB), SegCtl0-2, and the page-walker registers PWBase/PWField/PWSize.
 * NOTE(review): the short CP0_CHECK(ctx->sc)/check_pw(ctx) guard lines
 * that upstream has before the SegCtl/PW cases appear to have been
 * dropped by the extraction -- confirm against upstream. */
7679 case CP0_REGISTER_05
:
7682 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7683 register_name
= "PageMask";
7686 check_insn(ctx
, ISA_MIPS32R2
);
7687 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7688 register_name
= "PageGrain";
7689 ctx
->base
.is_jmp
= DISAS_STOP
;
7693 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7694 register_name
= "SegCtl0";
7698 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7699 register_name
= "SegCtl1";
7703 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7704 register_name
= "SegCtl2";
7708 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7709 register_name
= "PWBase";
7713 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7714 register_name
= "PWField";
7718 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7719 register_name
= "PWSize";
7722 goto cp0_unimplemented
;
/* CP0 register 6: Wired, shadow-register-set configuration (R2), PWCtl. */
7725 case CP0_REGISTER_06
:
7728 gen_helper_mtc0_wired(cpu_env
, arg
);
7729 register_name
= "Wired";
7732 check_insn(ctx
, ISA_MIPS32R2
);
7733 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7734 register_name
= "SRSConf0";
7737 check_insn(ctx
, ISA_MIPS32R2
);
7738 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7739 register_name
= "SRSConf1";
7742 check_insn(ctx
, ISA_MIPS32R2
);
7743 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7744 register_name
= "SRSConf2";
7747 check_insn(ctx
, ISA_MIPS32R2
);
7748 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7749 register_name
= "SRSConf3";
7752 check_insn(ctx
, ISA_MIPS32R2
);
7753 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7754 register_name
= "SRSConf4";
7758 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7759 register_name
= "PWCtl";
7762 goto cp0_unimplemented
;
/* CP0 register 7: HWREna (R2).  Writing it changes which RDHWR registers
 * are accessible, so translation must stop (DISAS_STOP). */
7765 case CP0_REGISTER_07
:
7768 check_insn(ctx
, ISA_MIPS32R2
);
7769 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7770 ctx
->base
.is_jmp
= DISAS_STOP
;
7771 register_name
= "HWREna";
7774 goto cp0_unimplemented
;
/* CP0 register 8: BadVAddr/BadInstr/BadInstrP/BadInstrX are read-only;
 * writes are ignored (only the name is recorded for tracing). */
7777 case CP0_REGISTER_08
:
7781 register_name
= "BadVAddr";
7785 register_name
= "BadInstr";
7789 register_name
= "BadInstrP";
7793 register_name
= "BadInstrX";
7796 goto cp0_unimplemented
;
/* CP0 register 9: Count, and the SAARI/SAAR pair (guarded by ctx->saar). */
7799 case CP0_REGISTER_09
:
7802 gen_helper_mtc0_count(cpu_env
, arg
);
7803 register_name
= "Count";
7806 CP0_CHECK(ctx
->saar
);
7807 gen_helper_mtc0_saari(cpu_env
, arg
);
7808 register_name
= "SAARI";
7811 CP0_CHECK(ctx
->saar
);
7812 gen_helper_mtc0_saar(cpu_env
, arg
);
7813 register_name
= "SAAR";
7816 goto cp0_unimplemented
;
/* CP0 register 10: EntryHi. */
7819 case CP0_REGISTER_10
:
7822 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7823 register_name
= "EntryHi";
7826 goto cp0_unimplemented
;
/* CP0 register 11: Compare (timer); sels 6,7 implementation dependent. */
7829 case CP0_REGISTER_11
:
7832 gen_helper_mtc0_compare(cpu_env
, arg
);
7833 register_name
= "Compare";
7835 /* 6,7 are implementation dependent */
7837 goto cp0_unimplemented
;
/* CP0 register 12: Status / IntCtl / SRSCtl / SRSMap.  A Status write
 * can change hflags, so the TB must be exited (DISAS_EXIT), not merely
 * stopped; the others use DISAS_STOP. */
7840 case CP0_REGISTER_12
:
7843 save_cpu_state(ctx
, 1);
7844 gen_helper_mtc0_status(cpu_env
, arg
);
7845 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7846 gen_save_pc(ctx
->base
.pc_next
+ 4);
7847 ctx
->base
.is_jmp
= DISAS_EXIT
;
7848 register_name
= "Status";
7851 check_insn(ctx
, ISA_MIPS32R2
);
7852 gen_helper_mtc0_intctl(cpu_env
, arg
);
7853 /* Stop translation as we may have switched the execution mode */
7854 ctx
->base
.is_jmp
= DISAS_STOP
;
7855 register_name
= "IntCtl";
7858 check_insn(ctx
, ISA_MIPS32R2
);
7859 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7860 /* Stop translation as we may have switched the execution mode */
7861 ctx
->base
.is_jmp
= DISAS_STOP
;
7862 register_name
= "SRSCtl";
7865 check_insn(ctx
, ISA_MIPS32R2
);
7866 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7867 /* Stop translation as we may have switched the execution mode */
7868 ctx
->base
.is_jmp
= DISAS_STOP
;
7869 register_name
= "SRSMap";
7872 goto cp0_unimplemented
;
/* CP0 register 13: Cause.  May raise/clear an interrupt request, so
 * force a full TB exit to re-check pending interrupts. */
7875 case CP0_REGISTER_13
:
7878 save_cpu_state(ctx
, 1);
7879 gen_helper_mtc0_cause(cpu_env
, arg
);
7880 /* Stop translation as we may have triggered an interrupt.
7881 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7882 * translated code to check for pending interrupts. */
7883 gen_save_pc(ctx
->base
.pc_next
+ 4);
7884 ctx
->base
.is_jmp
= DISAS_EXIT
;
7885 register_name
= "Cause";
7888 goto cp0_unimplemented
;
/* CP0 register 14: EPC -- plain store, no side effects. */
7891 case CP0_REGISTER_14
:
7894 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7895 register_name
= "EPC";
7898 goto cp0_unimplemented
;
/* CP0 register 15: PRid is read-only (write ignored); EBase (R2). */
7901 case CP0_REGISTER_15
:
7905 register_name
= "PRid";
7908 check_insn(ctx
, ISA_MIPS32R2
);
7909 gen_helper_mtc0_ebase(cpu_env
, arg
);
7910 register_name
= "EBase";
7913 goto cp0_unimplemented
;
/* CP0 register 16: Config0..Config5 (writable ones stop translation as
 * the execution mode may change); Config1 is read-only; 6,7 are
 * implementation dependent (writes ignored). */
7916 case CP0_REGISTER_16
:
7919 gen_helper_mtc0_config0(cpu_env
, arg
);
7920 register_name
= "Config";
7921 /* Stop translation as we may have switched the execution mode */
7922 ctx
->base
.is_jmp
= DISAS_STOP
;
7925 /* ignored, read only */
7926 register_name
= "Config1";
7929 gen_helper_mtc0_config2(cpu_env
, arg
);
7930 register_name
= "Config2";
7931 /* Stop translation as we may have switched the execution mode */
7932 ctx
->base
.is_jmp
= DISAS_STOP
;
7935 gen_helper_mtc0_config3(cpu_env
, arg
);
7936 register_name
= "Config3";
7937 /* Stop translation as we may have switched the execution mode */
7938 ctx
->base
.is_jmp
= DISAS_STOP
;
7941 gen_helper_mtc0_config4(cpu_env
, arg
);
7942 register_name
= "Config4";
7943 ctx
->base
.is_jmp
= DISAS_STOP
;
7946 gen_helper_mtc0_config5(cpu_env
, arg
);
7947 register_name
= "Config5";
7948 /* Stop translation as we may have switched the execution mode */
7949 ctx
->base
.is_jmp
= DISAS_STOP
;
7951 /* 6,7 are implementation dependent */
7954 register_name
= "Config6";
7958 register_name
= "Config7";
7961 register_name
= "Invalid config selector";
7962 goto cp0_unimplemented
;
/* CP0 register 17: LLAddr; MAAR/MAARI guarded by CP0_CHECK(ctx->mrp). */
7965 case CP0_REGISTER_17
:
7968 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7969 register_name
= "LLAddr";
7972 CP0_CHECK(ctx
->mrp
);
7973 gen_helper_mtc0_maar(cpu_env
, arg
);
7974 register_name
= "MAAR";
7977 CP0_CHECK(ctx
->mrp
);
7978 gen_helper_mtc0_maari(cpu_env
, arg
);
7979 register_name
= "MAARI";
7982 goto cp0_unimplemented
;
/* CP0 register 18: WatchLo, one instance per sel (the per-sel case
 * labels were presumably dropped by the extraction); requires the
 * Config1.WR watch-registers bit. */
7985 case CP0_REGISTER_18
:
7995 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7996 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7997 register_name
= "WatchLo";
8000 goto cp0_unimplemented
;
/* CP0 register 19: WatchHi, same per-sel structure as WatchLo. */
8003 case CP0_REGISTER_19
:
8013 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8014 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8015 register_name
= "WatchHi";
8018 goto cp0_unimplemented
;
/* CP0 register 20: XContext -- 64-bit targets only (MIPS III+). */
8021 case CP0_REGISTER_20
:
8024 #if defined(TARGET_MIPS64)
8025 check_insn(ctx
, ISA_MIPS3
);
8026 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8027 register_name
= "XContext";
8031 goto cp0_unimplemented
;
8034 case CP0_REGISTER_21
:
8035 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8036 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8039 gen_helper_mtc0_framemask(cpu_env
, arg
);
8040 register_name
= "Framemask";
8043 goto cp0_unimplemented
;
/* CP0 register 22: Diagnostic -- implementation dependent, write ignored. */
8046 case CP0_REGISTER_22
:
8048 register_name
= "Diagnostic"; /* implementation dependent */
/* CP0 register 23: Debug (EJTAG) plus PDtrace registers.  The PDtrace
 * helpers are commented out upstream (unimplemented) and fall through
 * to cp0_unimplemented.  NOTE(review): the UserTraceData sel has the
 * DISAS_STOP assignment duplicated (before and after the commented-out
 * helper) -- present upstream as well; harmless but redundant. */
8050 case CP0_REGISTER_23
:
8053 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8054 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8055 gen_save_pc(ctx
->base
.pc_next
+ 4);
8056 ctx
->base
.is_jmp
= DISAS_EXIT
;
8057 register_name
= "Debug";
8060 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
8061 register_name
= "TraceControl";
8062 /* Stop translation as we may have switched the execution mode */
8063 ctx
->base
.is_jmp
= DISAS_STOP
;
8064 goto cp0_unimplemented
;
8066 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
8067 register_name
= "TraceControl2";
8068 /* Stop translation as we may have switched the execution mode */
8069 ctx
->base
.is_jmp
= DISAS_STOP
;
8070 goto cp0_unimplemented
;
8072 /* Stop translation as we may have switched the execution mode */
8073 ctx
->base
.is_jmp
= DISAS_STOP
;
8074 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
8075 register_name
= "UserTraceData";
8076 /* Stop translation as we may have switched the execution mode */
8077 ctx
->base
.is_jmp
= DISAS_STOP
;
8078 goto cp0_unimplemented
;
8080 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
8081 /* Stop translation as we may have switched the execution mode */
8082 ctx
->base
.is_jmp
= DISAS_STOP
;
8083 register_name
= "TraceBPC";
8084 goto cp0_unimplemented
;
8086 goto cp0_unimplemented
;
/* CP0 register 24: DEPC (EJTAG) -- plain store. */
8089 case CP0_REGISTER_24
:
8093 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8094 register_name
= "DEPC";
8097 goto cp0_unimplemented
;
/* CP0 register 25: performance counters.  Only Performance0 has a
 * helper; 1..7 are unimplemented (commented out upstream). */
8100 case CP0_REGISTER_25
:
8103 gen_helper_mtc0_performance0(cpu_env
, arg
);
8104 register_name
= "Performance0";
8107 // gen_helper_mtc0_performance1(arg);
8108 register_name
= "Performance1";
8109 goto cp0_unimplemented
;
8111 // gen_helper_mtc0_performance2(arg);
8112 register_name
= "Performance2";
8113 goto cp0_unimplemented
;
8115 // gen_helper_mtc0_performance3(arg);
8116 register_name
= "Performance3";
8117 goto cp0_unimplemented
;
8119 // gen_helper_mtc0_performance4(arg);
8120 register_name
= "Performance4";
8121 goto cp0_unimplemented
;
8123 // gen_helper_mtc0_performance5(arg);
8124 register_name
= "Performance5";
8125 goto cp0_unimplemented
;
8127 // gen_helper_mtc0_performance6(arg);
8128 register_name
= "Performance6";
8129 goto cp0_unimplemented
;
8131 // gen_helper_mtc0_performance7(arg);
8132 register_name
= "Performance7";
8133 goto cp0_unimplemented
;
8135 goto cp0_unimplemented
;
/* CP0 register 26: ErrCtl -- write may change parity/ECC mode, stop TB. */
8138 case CP0_REGISTER_26
:
8141 gen_helper_mtc0_errctl(cpu_env
, arg
);
8142 ctx
->base
.is_jmp
= DISAS_STOP
;
8143 register_name
= "ErrCtl";
8146 goto cp0_unimplemented
;
/* CP0 register 27: CacheErr -- read-only, write ignored. */
8149 case CP0_REGISTER_27
:
8156 register_name
= "CacheErr";
8159 goto cp0_unimplemented
;
/* CP0 register 28: cache tag/data low halves. */
8162 case CP0_REGISTER_28
:
8168 gen_helper_mtc0_taglo(cpu_env
, arg
);
8169 register_name
= "TagLo";
8175 gen_helper_mtc0_datalo(cpu_env
, arg
);
8176 register_name
= "DataLo";
8179 goto cp0_unimplemented
;
/* CP0 register 29: cache tag/data high halves. */
8182 case CP0_REGISTER_29
:
8188 gen_helper_mtc0_taghi(cpu_env
, arg
);
8189 register_name
= "TagHi";
8195 gen_helper_mtc0_datahi(cpu_env
, arg
);
8196 register_name
= "DataHi";
8199 register_name
= "invalid sel";
8200 goto cp0_unimplemented
;
/* CP0 register 30: ErrorEPC -- plain store. */
8203 case CP0_REGISTER_30
:
8206 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8207 register_name
= "ErrorEPC";
8210 goto cp0_unimplemented
;
/* CP0 register 31: DESAVE (sel 0) and KScratch1-6 (sels 2..7, gated
 * per-sel by the kscrexist bitmap; array index is sel-2). */
8213 case CP0_REGISTER_31
:
8217 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8218 register_name
= "DESAVE";
8226 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8227 tcg_gen_st_tl(arg
, cpu_env
,
8228 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8229 register_name
= "KScratch";
8232 goto cp0_unimplemented
;
8236 goto cp0_unimplemented
;
/* Successful-path epilogue: trace the access, then (in icount mode)
 * exit the TB so a timer interrupt raised by the write is taken. */
8238 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
8240 /* For simplicity assume that all writes can cause interrupts. */
8241 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8243 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
8244 * translated code to check for pending interrupts. */
8245 gen_save_pc(ctx
->base
.pc_next
+ 4);
8246 ctx
->base
.is_jmp
= DISAS_EXIT
;
/* cp0_unimplemented label target: log the unhandled register/sel.
 * (The label line itself appears to have been dropped by the
 * extraction -- confirm against upstream.) */
8251 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
8252 register_name
, reg
, sel
);
8255 #if defined(TARGET_MIPS64)
8256 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8258 const char *register_name
= "invalid";
8261 check_insn(ctx
, ISA_MIPS64
);
8265 case CP0_REGISTER_00
:
8268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8269 register_name
= "Index";
8272 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8273 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8274 register_name
= "MVPControl";
8277 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8278 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8279 register_name
= "MVPConf0";
8282 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8283 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8284 register_name
= "MVPConf1";
8288 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8289 register_name
= "VPControl";
8292 goto cp0_unimplemented
;
8295 case CP0_REGISTER_01
:
8298 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8299 gen_helper_mfc0_random(arg
, cpu_env
);
8300 register_name
= "Random";
8303 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8304 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8305 register_name
= "VPEControl";
8308 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8309 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8310 register_name
= "VPEConf0";
8313 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8314 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8315 register_name
= "VPEConf1";
8318 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8319 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8320 register_name
= "YQMask";
8323 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8324 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8325 register_name
= "VPESchedule";
8328 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8329 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8330 register_name
= "VPEScheFBack";
8333 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8334 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8335 register_name
= "VPEOpt";
8338 goto cp0_unimplemented
;
8341 case CP0_REGISTER_02
:
8344 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8345 register_name
= "EntryLo0";
8348 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8349 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8350 register_name
= "TCStatus";
8353 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8354 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8355 register_name
= "TCBind";
8358 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8359 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8360 register_name
= "TCRestart";
8363 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8364 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8365 register_name
= "TCHalt";
8368 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8369 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8370 register_name
= "TCContext";
8373 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8374 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8375 register_name
= "TCSchedule";
8378 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8379 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8380 register_name
= "TCScheFBack";
8383 goto cp0_unimplemented
;
8386 case CP0_REGISTER_03
:
8389 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8390 register_name
= "EntryLo1";
8394 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8395 register_name
= "GlobalNumber";
8398 goto cp0_unimplemented
;
8401 case CP0_REGISTER_04
:
8404 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8405 register_name
= "Context";
8408 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8409 register_name
= "ContextConfig";
8410 goto cp0_unimplemented
;
8412 CP0_CHECK(ctx
->ulri
);
8413 tcg_gen_ld_tl(arg
, cpu_env
,
8414 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8415 register_name
= "UserLocal";
8418 goto cp0_unimplemented
;
8421 case CP0_REGISTER_05
:
8424 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8425 register_name
= "PageMask";
8428 check_insn(ctx
, ISA_MIPS32R2
);
8429 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8430 register_name
= "PageGrain";
8434 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8435 register_name
= "SegCtl0";
8439 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8440 register_name
= "SegCtl1";
8444 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8445 register_name
= "SegCtl2";
8449 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8450 register_name
= "PWBase";
8454 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8455 register_name
= "PWField";
8459 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8460 register_name
= "PWSize";
8463 goto cp0_unimplemented
;
8466 case CP0_REGISTER_06
:
8469 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8470 register_name
= "Wired";
8473 check_insn(ctx
, ISA_MIPS32R2
);
8474 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8475 register_name
= "SRSConf0";
8478 check_insn(ctx
, ISA_MIPS32R2
);
8479 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8480 register_name
= "SRSConf1";
8483 check_insn(ctx
, ISA_MIPS32R2
);
8484 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8485 register_name
= "SRSConf2";
8488 check_insn(ctx
, ISA_MIPS32R2
);
8489 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8490 register_name
= "SRSConf3";
8493 check_insn(ctx
, ISA_MIPS32R2
);
8494 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8495 register_name
= "SRSConf4";
8499 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8500 register_name
= "PWCtl";
8503 goto cp0_unimplemented
;
8506 case CP0_REGISTER_07
:
8509 check_insn(ctx
, ISA_MIPS32R2
);
8510 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8511 register_name
= "HWREna";
8514 goto cp0_unimplemented
;
8517 case CP0_REGISTER_08
:
8520 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8521 register_name
= "BadVAddr";
8525 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8526 register_name
= "BadInstr";
8530 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8531 register_name
= "BadInstrP";
8535 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8536 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8537 register_name
= "BadInstrX";
8540 goto cp0_unimplemented
;
8543 case CP0_REGISTER_09
:
8546 /* Mark as an IO operation because we read the time. */
8547 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8550 gen_helper_mfc0_count(arg
, cpu_env
);
8551 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8554 /* Break the TB to be able to take timer interrupts immediately
8555 after reading count. DISAS_STOP isn't sufficient, we need to
8556 ensure we break completely out of translated code. */
8557 gen_save_pc(ctx
->base
.pc_next
+ 4);
8558 ctx
->base
.is_jmp
= DISAS_EXIT
;
8559 register_name
= "Count";
8562 CP0_CHECK(ctx
->saar
);
8563 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
8564 register_name
= "SAARI";
8567 CP0_CHECK(ctx
->saar
);
8568 gen_helper_dmfc0_saar(arg
, cpu_env
);
8569 register_name
= "SAAR";
8572 goto cp0_unimplemented
;
8575 case CP0_REGISTER_10
:
8578 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8579 register_name
= "EntryHi";
8582 goto cp0_unimplemented
;
8585 case CP0_REGISTER_11
:
8588 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8589 register_name
= "Compare";
8591 /* 6,7 are implementation dependent */
8593 goto cp0_unimplemented
;
8596 case CP0_REGISTER_12
:
8599 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8600 register_name
= "Status";
8603 check_insn(ctx
, ISA_MIPS32R2
);
8604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8605 register_name
= "IntCtl";
8608 check_insn(ctx
, ISA_MIPS32R2
);
8609 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8610 register_name
= "SRSCtl";
8613 check_insn(ctx
, ISA_MIPS32R2
);
8614 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8615 register_name
= "SRSMap";
8618 goto cp0_unimplemented
;
8621 case CP0_REGISTER_13
:
8624 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8625 register_name
= "Cause";
8628 goto cp0_unimplemented
;
8631 case CP0_REGISTER_14
:
8634 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8635 register_name
= "EPC";
8638 goto cp0_unimplemented
;
8641 case CP0_REGISTER_15
:
8644 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8645 register_name
= "PRid";
8648 check_insn(ctx
, ISA_MIPS32R2
);
8649 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8650 register_name
= "EBase";
8653 check_insn(ctx
, ISA_MIPS32R2
);
8654 CP0_CHECK(ctx
->cmgcr
);
8655 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8656 register_name
= "CMGCRBase";
8659 goto cp0_unimplemented
;
8662 case CP0_REGISTER_16
:
8665 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8666 register_name
= "Config";
8669 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8670 register_name
= "Config1";
8673 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8674 register_name
= "Config2";
8677 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8678 register_name
= "Config3";
8681 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8682 register_name
= "Config4";
8685 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8686 register_name
= "Config5";
8688 /* 6,7 are implementation dependent */
8690 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8691 register_name
= "Config6";
8694 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8695 register_name
= "Config7";
8698 goto cp0_unimplemented
;
8701 case CP0_REGISTER_17
:
8704 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8705 register_name
= "LLAddr";
8708 CP0_CHECK(ctx
->mrp
);
8709 gen_helper_dmfc0_maar(arg
, cpu_env
);
8710 register_name
= "MAAR";
8713 CP0_CHECK(ctx
->mrp
);
8714 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8715 register_name
= "MAARI";
8718 goto cp0_unimplemented
;
8721 case CP0_REGISTER_18
:
8731 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8732 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8733 register_name
= "WatchLo";
8736 goto cp0_unimplemented
;
8739 case CP0_REGISTER_19
:
8749 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8750 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8751 register_name
= "WatchHi";
8754 goto cp0_unimplemented
;
8757 case CP0_REGISTER_20
:
8760 check_insn(ctx
, ISA_MIPS3
);
8761 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8762 register_name
= "XContext";
8765 goto cp0_unimplemented
;
8768 case CP0_REGISTER_21
:
8769 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8770 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8773 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8774 register_name
= "Framemask";
8777 goto cp0_unimplemented
;
8780 case CP0_REGISTER_22
:
8781 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8782 register_name
= "'Diagnostic"; /* implementation dependent */
8784 case CP0_REGISTER_23
:
8787 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8788 register_name
= "Debug";
8791 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8792 register_name
= "TraceControl";
8793 goto cp0_unimplemented
;
8795 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8796 register_name
= "TraceControl2";
8797 goto cp0_unimplemented
;
8799 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8800 register_name
= "UserTraceData";
8801 goto cp0_unimplemented
;
8803 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8804 register_name
= "TraceBPC";
8805 goto cp0_unimplemented
;
8807 goto cp0_unimplemented
;
8810 case CP0_REGISTER_24
:
8814 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8815 register_name
= "DEPC";
8818 goto cp0_unimplemented
;
8821 case CP0_REGISTER_25
:
8824 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8825 register_name
= "Performance0";
8828 // gen_helper_dmfc0_performance1(arg);
8829 register_name
= "Performance1";
8830 goto cp0_unimplemented
;
8832 // gen_helper_dmfc0_performance2(arg);
8833 register_name
= "Performance2";
8834 goto cp0_unimplemented
;
8836 // gen_helper_dmfc0_performance3(arg);
8837 register_name
= "Performance3";
8838 goto cp0_unimplemented
;
8840 // gen_helper_dmfc0_performance4(arg);
8841 register_name
= "Performance4";
8842 goto cp0_unimplemented
;
8844 // gen_helper_dmfc0_performance5(arg);
8845 register_name
= "Performance5";
8846 goto cp0_unimplemented
;
8848 // gen_helper_dmfc0_performance6(arg);
8849 register_name
= "Performance6";
8850 goto cp0_unimplemented
;
8852 // gen_helper_dmfc0_performance7(arg);
8853 register_name
= "Performance7";
8854 goto cp0_unimplemented
;
8856 goto cp0_unimplemented
;
8859 case CP0_REGISTER_26
:
8862 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8863 register_name
= "ErrCtl";
8866 goto cp0_unimplemented
;
8869 case CP0_REGISTER_27
:
8876 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8877 register_name
= "CacheErr";
8880 goto cp0_unimplemented
;
8883 case CP0_REGISTER_28
:
8889 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8890 register_name
= "TagLo";
8896 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8897 register_name
= "DataLo";
8900 goto cp0_unimplemented
;
8903 case CP0_REGISTER_29
:
8909 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8910 register_name
= "TagHi";
8916 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8917 register_name
= "DataHi";
8920 goto cp0_unimplemented
;
8923 case CP0_REGISTER_30
:
8926 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8927 register_name
= "ErrorEPC";
8930 goto cp0_unimplemented
;
8933 case CP0_REGISTER_31
:
8937 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8938 register_name
= "DESAVE";
8946 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8947 tcg_gen_ld_tl(arg
, cpu_env
,
8948 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8949 register_name
= "KScratch";
8952 goto cp0_unimplemented
;
8956 goto cp0_unimplemented
;
8958 trace_mips_translate_c0("dmfc0", register_name
, reg
, sel
);
8962 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n",
8963 register_name
, reg
, sel
);
8964 gen_mfc0_unimplemented(ctx
, arg
);
8967 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8969 const char *register_name
= "invalid";
8972 check_insn(ctx
, ISA_MIPS64
);
8975 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8980 case CP0_REGISTER_00
:
8983 gen_helper_mtc0_index(cpu_env
, arg
);
8984 register_name
= "Index";
8987 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8988 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
8989 register_name
= "MVPControl";
8992 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8994 register_name
= "MVPConf0";
8997 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8999 register_name
= "MVPConf1";
9004 register_name
= "VPControl";
9007 goto cp0_unimplemented
;
9010 case CP0_REGISTER_01
:
9014 register_name
= "Random";
9017 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9018 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
9019 register_name
= "VPEControl";
9022 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9023 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
9024 register_name
= "VPEConf0";
9027 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9028 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
9029 register_name
= "VPEConf1";
9032 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9033 gen_helper_mtc0_yqmask(cpu_env
, arg
);
9034 register_name
= "YQMask";
9037 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9038 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
9039 register_name
= "VPESchedule";
9042 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9043 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
9044 register_name
= "VPEScheFBack";
9047 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9048 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
9049 register_name
= "VPEOpt";
9052 goto cp0_unimplemented
;
9055 case CP0_REGISTER_02
:
9058 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
9059 register_name
= "EntryLo0";
9062 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9063 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
9064 register_name
= "TCStatus";
9067 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9068 gen_helper_mtc0_tcbind(cpu_env
, arg
);
9069 register_name
= "TCBind";
9072 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9073 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
9074 register_name
= "TCRestart";
9077 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9078 gen_helper_mtc0_tchalt(cpu_env
, arg
);
9079 register_name
= "TCHalt";
9082 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9083 gen_helper_mtc0_tccontext(cpu_env
, arg
);
9084 register_name
= "TCContext";
9087 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9088 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
9089 register_name
= "TCSchedule";
9092 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9093 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
9094 register_name
= "TCScheFBack";
9097 goto cp0_unimplemented
;
9100 case CP0_REGISTER_03
:
9103 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
9104 register_name
= "EntryLo1";
9109 register_name
= "GlobalNumber";
9112 goto cp0_unimplemented
;
9115 case CP0_REGISTER_04
:
9118 gen_helper_mtc0_context(cpu_env
, arg
);
9119 register_name
= "Context";
9122 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
9123 register_name
= "ContextConfig";
9124 goto cp0_unimplemented
;
9126 CP0_CHECK(ctx
->ulri
);
9127 tcg_gen_st_tl(arg
, cpu_env
,
9128 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9129 register_name
= "UserLocal";
9132 goto cp0_unimplemented
;
9135 case CP0_REGISTER_05
:
9138 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9139 register_name
= "PageMask";
9142 check_insn(ctx
, ISA_MIPS32R2
);
9143 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9144 register_name
= "PageGrain";
9148 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9149 register_name
= "SegCtl0";
9153 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9154 register_name
= "SegCtl1";
9158 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9159 register_name
= "SegCtl2";
9163 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9164 register_name
= "PWBase";
9168 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9169 register_name
= "PWField";
9173 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9174 register_name
= "PWSize";
9177 goto cp0_unimplemented
;
9180 case CP0_REGISTER_06
:
9183 gen_helper_mtc0_wired(cpu_env
, arg
);
9184 register_name
= "Wired";
9187 check_insn(ctx
, ISA_MIPS32R2
);
9188 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9189 register_name
= "SRSConf0";
9192 check_insn(ctx
, ISA_MIPS32R2
);
9193 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9194 register_name
= "SRSConf1";
9197 check_insn(ctx
, ISA_MIPS32R2
);
9198 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9199 register_name
= "SRSConf2";
9202 check_insn(ctx
, ISA_MIPS32R2
);
9203 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9204 register_name
= "SRSConf3";
9207 check_insn(ctx
, ISA_MIPS32R2
);
9208 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9209 register_name
= "SRSConf4";
9213 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9214 register_name
= "PWCtl";
9217 goto cp0_unimplemented
;
9220 case CP0_REGISTER_07
:
9223 check_insn(ctx
, ISA_MIPS32R2
);
9224 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9225 ctx
->base
.is_jmp
= DISAS_STOP
;
9226 register_name
= "HWREna";
9229 goto cp0_unimplemented
;
9232 case CP0_REGISTER_08
:
9236 register_name
= "BadVAddr";
9240 register_name
= "BadInstr";
9244 register_name
= "BadInstrP";
9248 register_name
= "BadInstrX";
9251 goto cp0_unimplemented
;
9254 case CP0_REGISTER_09
:
9257 gen_helper_mtc0_count(cpu_env
, arg
);
9258 register_name
= "Count";
9261 CP0_CHECK(ctx
->saar
);
9262 gen_helper_mtc0_saari(cpu_env
, arg
);
9263 register_name
= "SAARI";
9266 CP0_CHECK(ctx
->saar
);
9267 gen_helper_mtc0_saar(cpu_env
, arg
);
9268 register_name
= "SAAR";
9271 goto cp0_unimplemented
;
9273 /* Stop translation as we may have switched the execution mode */
9274 ctx
->base
.is_jmp
= DISAS_STOP
;
9276 case CP0_REGISTER_10
:
9279 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9280 register_name
= "EntryHi";
9283 goto cp0_unimplemented
;
9286 case CP0_REGISTER_11
:
9289 gen_helper_mtc0_compare(cpu_env
, arg
);
9290 register_name
= "Compare";
9292 /* 6,7 are implementation dependent */
9294 goto cp0_unimplemented
;
9296 /* Stop translation as we may have switched the execution mode */
9297 ctx
->base
.is_jmp
= DISAS_STOP
;
9299 case CP0_REGISTER_12
:
9302 save_cpu_state(ctx
, 1);
9303 gen_helper_mtc0_status(cpu_env
, arg
);
9304 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9305 gen_save_pc(ctx
->base
.pc_next
+ 4);
9306 ctx
->base
.is_jmp
= DISAS_EXIT
;
9307 register_name
= "Status";
9310 check_insn(ctx
, ISA_MIPS32R2
);
9311 gen_helper_mtc0_intctl(cpu_env
, arg
);
9312 /* Stop translation as we may have switched the execution mode */
9313 ctx
->base
.is_jmp
= DISAS_STOP
;
9314 register_name
= "IntCtl";
9317 check_insn(ctx
, ISA_MIPS32R2
);
9318 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9319 /* Stop translation as we may have switched the execution mode */
9320 ctx
->base
.is_jmp
= DISAS_STOP
;
9321 register_name
= "SRSCtl";
9324 check_insn(ctx
, ISA_MIPS32R2
);
9325 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9326 /* Stop translation as we may have switched the execution mode */
9327 ctx
->base
.is_jmp
= DISAS_STOP
;
9328 register_name
= "SRSMap";
9331 goto cp0_unimplemented
;
9334 case CP0_REGISTER_13
:
9337 save_cpu_state(ctx
, 1);
9338 gen_helper_mtc0_cause(cpu_env
, arg
);
9339 /* Stop translation as we may have triggered an interrupt.
9340 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9341 * translated code to check for pending interrupts. */
9342 gen_save_pc(ctx
->base
.pc_next
+ 4);
9343 ctx
->base
.is_jmp
= DISAS_EXIT
;
9344 register_name
= "Cause";
9347 goto cp0_unimplemented
;
9350 case CP0_REGISTER_14
:
9353 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9354 register_name
= "EPC";
9357 goto cp0_unimplemented
;
9360 case CP0_REGISTER_15
:
9364 register_name
= "PRid";
9367 check_insn(ctx
, ISA_MIPS32R2
);
9368 gen_helper_mtc0_ebase(cpu_env
, arg
);
9369 register_name
= "EBase";
9372 goto cp0_unimplemented
;
9375 case CP0_REGISTER_16
:
9378 gen_helper_mtc0_config0(cpu_env
, arg
);
9379 register_name
= "Config";
9380 /* Stop translation as we may have switched the execution mode */
9381 ctx
->base
.is_jmp
= DISAS_STOP
;
9384 /* ignored, read only */
9385 register_name
= "Config1";
9388 gen_helper_mtc0_config2(cpu_env
, arg
);
9389 register_name
= "Config2";
9390 /* Stop translation as we may have switched the execution mode */
9391 ctx
->base
.is_jmp
= DISAS_STOP
;
9394 gen_helper_mtc0_config3(cpu_env
, arg
);
9395 register_name
= "Config3";
9396 /* Stop translation as we may have switched the execution mode */
9397 ctx
->base
.is_jmp
= DISAS_STOP
;
9400 /* currently ignored */
9401 register_name
= "Config4";
9404 gen_helper_mtc0_config5(cpu_env
, arg
);
9405 register_name
= "Config5";
9406 /* Stop translation as we may have switched the execution mode */
9407 ctx
->base
.is_jmp
= DISAS_STOP
;
9409 /* 6,7 are implementation dependent */
9411 register_name
= "Invalid config selector";
9412 goto cp0_unimplemented
;
9415 case CP0_REGISTER_17
:
9418 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9419 register_name
= "LLAddr";
9422 CP0_CHECK(ctx
->mrp
);
9423 gen_helper_mtc0_maar(cpu_env
, arg
);
9424 register_name
= "MAAR";
9427 CP0_CHECK(ctx
->mrp
);
9428 gen_helper_mtc0_maari(cpu_env
, arg
);
9429 register_name
= "MAARI";
9432 goto cp0_unimplemented
;
9435 case CP0_REGISTER_18
:
9445 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9446 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9447 register_name
= "WatchLo";
9450 goto cp0_unimplemented
;
9453 case CP0_REGISTER_19
:
9463 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9464 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9465 register_name
= "WatchHi";
9468 goto cp0_unimplemented
;
9471 case CP0_REGISTER_20
:
9474 check_insn(ctx
, ISA_MIPS3
);
9475 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9476 register_name
= "XContext";
9479 goto cp0_unimplemented
;
9482 case CP0_REGISTER_21
:
9483 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9484 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9487 gen_helper_mtc0_framemask(cpu_env
, arg
);
9488 register_name
= "Framemask";
9491 goto cp0_unimplemented
;
9494 case CP0_REGISTER_22
:
9496 register_name
= "Diagnostic"; /* implementation dependent */
9498 case CP0_REGISTER_23
:
9501 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9502 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9503 gen_save_pc(ctx
->base
.pc_next
+ 4);
9504 ctx
->base
.is_jmp
= DISAS_EXIT
;
9505 register_name
= "Debug";
9508 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9509 /* Stop translation as we may have switched the execution mode */
9510 ctx
->base
.is_jmp
= DISAS_STOP
;
9511 register_name
= "TraceControl";
9512 goto cp0_unimplemented
;
9514 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9515 /* Stop translation as we may have switched the execution mode */
9516 ctx
->base
.is_jmp
= DISAS_STOP
;
9517 register_name
= "TraceControl2";
9518 goto cp0_unimplemented
;
9520 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9521 /* Stop translation as we may have switched the execution mode */
9522 ctx
->base
.is_jmp
= DISAS_STOP
;
9523 register_name
= "UserTraceData";
9524 goto cp0_unimplemented
;
9526 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9527 /* Stop translation as we may have switched the execution mode */
9528 ctx
->base
.is_jmp
= DISAS_STOP
;
9529 register_name
= "TraceBPC";
9530 goto cp0_unimplemented
;
9532 goto cp0_unimplemented
;
9535 case CP0_REGISTER_24
:
9539 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9540 register_name
= "DEPC";
9543 goto cp0_unimplemented
;
9546 case CP0_REGISTER_25
:
9549 gen_helper_mtc0_performance0(cpu_env
, arg
);
9550 register_name
= "Performance0";
9553 // gen_helper_mtc0_performance1(cpu_env, arg);
9554 register_name
= "Performance1";
9555 goto cp0_unimplemented
;
9557 // gen_helper_mtc0_performance2(cpu_env, arg);
9558 register_name
= "Performance2";
9559 goto cp0_unimplemented
;
9561 // gen_helper_mtc0_performance3(cpu_env, arg);
9562 register_name
= "Performance3";
9563 goto cp0_unimplemented
;
9565 // gen_helper_mtc0_performance4(cpu_env, arg);
9566 register_name
= "Performance4";
9567 goto cp0_unimplemented
;
9569 // gen_helper_mtc0_performance5(cpu_env, arg);
9570 register_name
= "Performance5";
9571 goto cp0_unimplemented
;
9573 // gen_helper_mtc0_performance6(cpu_env, arg);
9574 register_name
= "Performance6";
9575 goto cp0_unimplemented
;
9577 // gen_helper_mtc0_performance7(cpu_env, arg);
9578 register_name
= "Performance7";
9579 goto cp0_unimplemented
;
9581 goto cp0_unimplemented
;
9584 case CP0_REGISTER_26
:
9587 gen_helper_mtc0_errctl(cpu_env
, arg
);
9588 ctx
->base
.is_jmp
= DISAS_STOP
;
9589 register_name
= "ErrCtl";
9592 goto cp0_unimplemented
;
9595 case CP0_REGISTER_27
:
9602 register_name
= "CacheErr";
9605 goto cp0_unimplemented
;
9608 case CP0_REGISTER_28
:
9614 gen_helper_mtc0_taglo(cpu_env
, arg
);
9615 register_name
= "TagLo";
9621 gen_helper_mtc0_datalo(cpu_env
, arg
);
9622 register_name
= "DataLo";
9625 goto cp0_unimplemented
;
9628 case CP0_REGISTER_29
:
9634 gen_helper_mtc0_taghi(cpu_env
, arg
);
9635 register_name
= "TagHi";
9641 gen_helper_mtc0_datahi(cpu_env
, arg
);
9642 register_name
= "DataHi";
9645 register_name
= "invalid sel";
9646 goto cp0_unimplemented
;
9649 case CP0_REGISTER_30
:
9652 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9653 register_name
= "ErrorEPC";
9656 goto cp0_unimplemented
;
9659 case CP0_REGISTER_31
:
9663 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9664 register_name
= "DESAVE";
9672 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9673 tcg_gen_st_tl(arg
, cpu_env
,
9674 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9675 register_name
= "KScratch";
9678 goto cp0_unimplemented
;
9682 goto cp0_unimplemented
;
9684 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
9686 /* For simplicity assume that all writes can cause interrupts. */
9687 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9689 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
9690 * translated code to check for pending interrupts. */
9691 gen_save_pc(ctx
->base
.pc_next
+ 4);
9692 ctx
->base
.is_jmp
= DISAS_EXIT
;
9697 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
9698 register_name
, reg
, sel
);
9700 #endif /* TARGET_MIPS64 */
/*
 * gen_mftr: emit TCG for the MT ASE MFTR instruction — read a register
 * belonging to another thread context (TargTC in CP0_VPEControl) into
 * GPR rd.  u selects CP0 (u == 0) vs. other register files; sel picks
 * the register class; h selects the high half for paired-single FPRs.
 *
 * NOTE(review): extraction artifact — leading decimals are original file
 * line numbers fused into the text; case labels / braces / breaks are
 * missing (numbering gaps, e.g. 9715 -> 9720).  Code left byte-identical.
 */
9702 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9703 int u
, int sel
, int h
)
9705 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9706 TCGv t0
= tcg_temp_local_new();
/* If the target TC is on another VPE (no MVP), or TargTC exceeds PTC,
 * the read yields -1 per the MT ASE */
9708 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9709 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9710 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
9711 tcg_gen_movi_tl(t0
, -1);
9712 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9713 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
9714 tcg_gen_movi_tl(t0
, -1);
9715 } else if (u
== 0) {
/* u == 0: CP0 registers of the other TC via mftc0_* helpers;
 * unlisted cases fall back to gen_mfc0 on the current context */
9720 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9723 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9733 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9736 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9739 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9742 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9745 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9748 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9751 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9754 gen_mfc0(ctx
, t0
, rt
, sel
);
9761 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9764 gen_mfc0(ctx
, t0
, rt
, sel
);
9770 gen_helper_mftc0_status(t0
, cpu_env
);
9773 gen_mfc0(ctx
, t0
, rt
, sel
);
9779 gen_helper_mftc0_cause(t0
, cpu_env
);
9789 gen_helper_mftc0_epc(t0
, cpu_env
);
9799 gen_helper_mftc0_ebase(t0
, cpu_env
);
9816 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9826 gen_helper_mftc0_debug(t0
, cpu_env
);
9829 gen_mfc0(ctx
, t0
, rt
, sel
);
9834 gen_mfc0(ctx
, t0
, rt
, sel
);
9836 } else switch (sel
) {
9837 /* GPR registers. */
9839 gen_helper_1e0i(mftgpr
, t0
, rt
);
9841 /* Auxiliary CPU registers */
/* DSP accumulators: LO/HI/ACX for ac0..ac3, selected by rt (labels missing) */
9845 gen_helper_1e0i(mftlo
, t0
, 0);
9848 gen_helper_1e0i(mfthi
, t0
, 0);
9851 gen_helper_1e0i(mftacx
, t0
, 0);
9854 gen_helper_1e0i(mftlo
, t0
, 1);
9857 gen_helper_1e0i(mfthi
, t0
, 1);
9860 gen_helper_1e0i(mftacx
, t0
, 1);
9863 gen_helper_1e0i(mftlo
, t0
, 2);
9866 gen_helper_1e0i(mfthi
, t0
, 2);
9869 gen_helper_1e0i(mftacx
, t0
, 2);
9872 gen_helper_1e0i(mftlo
, t0
, 3);
9875 gen_helper_1e0i(mfthi
, t0
, 3);
9878 gen_helper_1e0i(mftacx
, t0
, 3);
9881 gen_helper_mftdsp(t0
, cpu_env
);
9887 /* Floating point (COP1). */
9889 /* XXX: For now we support only a single FPU context. */
/* h == 0 reads the low 32 bits of the FPR, h != 0 the high half */
9891 TCGv_i32 fp0
= tcg_temp_new_i32();
9893 gen_load_fpr32(ctx
, fp0
, rt
);
9894 tcg_gen_ext_i32_tl(t0
, fp0
);
9895 tcg_temp_free_i32(fp0
);
9897 TCGv_i32 fp0
= tcg_temp_new_i32();
9899 gen_load_fpr32h(ctx
, fp0
, rt
);
9900 tcg_gen_ext_i32_tl(t0
, fp0
);
9901 tcg_temp_free_i32(fp0
);
9905 /* XXX: For now we support only a single FPU context. */
9906 gen_helper_1e0i(cfc1
, t0
, rt
);
9908 /* COP2: Not implemented. */
9915 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9916 gen_store_gpr(t0
, rd
);
/* die label (presumably): reserved-instruction exception on bad encodings */
9922 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9923 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_mttr: emit TCG for the MT ASE MTTR instruction — write GPR rt into
 * a register of another thread context (TargTC in CP0_VPEControl).
 * Mirror image of gen_mftr: u == 0 targets CP0 via mttc0_* helpers,
 * otherwise sel selects GPR / DSP accumulator / FPU destinations.
 *
 * NOTE(review): extraction artifact — leading decimals are original file
 * line numbers fused into the text; case labels / braces / breaks are
 * missing (numbering gaps).  Code left byte-identical; comments only.
 */
9926 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9927 int u
, int sel
, int h
)
9929 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9930 TCGv t0
= tcg_temp_local_new();
9932 gen_load_gpr(t0
, rt
);
/* Writes to an inaccessible TC (other VPE without MVP, or TargTC > PTC)
 * are silently dropped — note the empty branches */
9933 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9934 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9935 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
9937 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9938 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
9940 } else if (u
== 0) {
9945 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
9948 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
9958 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
9961 gen_helper_mttc0_tcbind(cpu_env
, t0
);
9964 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
9967 gen_helper_mttc0_tchalt(cpu_env
, t0
);
9970 gen_helper_mttc0_tccontext(cpu_env
, t0
);
9973 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
9976 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
9979 gen_mtc0(ctx
, t0
, rd
, sel
);
9986 gen_helper_mttc0_entryhi(cpu_env
, t0
);
9989 gen_mtc0(ctx
, t0
, rd
, sel
);
9995 gen_helper_mttc0_status(cpu_env
, t0
);
9998 gen_mtc0(ctx
, t0
, rd
, sel
);
10004 gen_helper_mttc0_cause(cpu_env
, t0
);
10014 gen_helper_mttc0_ebase(cpu_env
, t0
);
10024 gen_helper_mttc0_debug(cpu_env
, t0
);
10027 gen_mtc0(ctx
, t0
, rd
, sel
);
10032 gen_mtc0(ctx
, t0
, rd
, sel
);
10034 } else switch (sel
) {
10035 /* GPR registers. */
10037 gen_helper_0e1i(mttgpr
, t0
, rd
);
10039 /* Auxiliary CPU registers */
/* DSP accumulators: LO/HI/ACX for ac0..ac3 (case labels missing here) */
10043 gen_helper_0e1i(mttlo
, t0
, 0);
10046 gen_helper_0e1i(mtthi
, t0
, 0);
10049 gen_helper_0e1i(mttacx
, t0
, 0);
10052 gen_helper_0e1i(mttlo
, t0
, 1);
10055 gen_helper_0e1i(mtthi
, t0
, 1);
10058 gen_helper_0e1i(mttacx
, t0
, 1);
10061 gen_helper_0e1i(mttlo
, t0
, 2);
10064 gen_helper_0e1i(mtthi
, t0
, 2);
10067 gen_helper_0e1i(mttacx
, t0
, 2);
10070 gen_helper_0e1i(mttlo
, t0
, 3);
10073 gen_helper_0e1i(mtthi
, t0
, 3);
10076 gen_helper_0e1i(mttacx
, t0
, 3);
10079 gen_helper_mttdsp(cpu_env
, t0
);
10085 /* Floating point (COP1). */
10087 /* XXX: For now we support only a single FPU context. */
/* h == 0 writes the low 32 bits of the FPR, h != 0 the high half */
10089 TCGv_i32 fp0
= tcg_temp_new_i32();
10091 tcg_gen_trunc_tl_i32(fp0
, t0
);
10092 gen_store_fpr32(ctx
, fp0
, rd
);
10093 tcg_temp_free_i32(fp0
);
10095 TCGv_i32 fp0
= tcg_temp_new_i32();
10097 tcg_gen_trunc_tl_i32(fp0
, t0
);
10098 gen_store_fpr32h(ctx
, fp0
, rd
);
10099 tcg_temp_free_i32(fp0
);
10103 /* XXX: For now we support only a single FPU context. */
10105 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
10107 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10108 tcg_temp_free_i32(fs_tmp
);
10110 /* Stop translation as we may have changed hflags */
10111 ctx
->base
.is_jmp
= DISAS_STOP
;
10113 /* COP2: Not implemented. */
10120 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
/* die label (presumably): reserved-instruction exception on bad encodings */
10126 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
10127 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_cp0: top-level dispatcher for CP0 (coprocessor 0) instructions —
 * MFC0/MTC0, DMFC0/DMTC0 (MIPS64), MFHC0/MTHC0, MFTR/MTTR (MT ASE),
 * the TLB ops (TLBWI/TLBWR/TLBP/TLBR/TLBINV/TLBINVF), ERET/ERETNC,
 * DERET and WAIT.  Dispatches on `opc`; rt/rd come from the decoded
 * instruction, sel from ctx->opcode & 0x7.
 *
 * NOTE(review): extraction artifact — leading decimals are original file
 * line numbers fused into the text; many case labels / braces / breaks
 * are missing (numbering gaps).  Code left byte-identical; comments only.
 */
10130 static void gen_cp0(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
10133 const char *opn
= "ldst";
10135 check_cp0_enabled(ctx
);
/* MFC0 with rt == 0 is a NOP (writes to $zero are discarded) */
10139 /* Treat as NOP. */
10142 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10147 TCGv t0
= tcg_temp_new();
10149 gen_load_gpr(t0
, rt
);
10150 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10155 #if defined(TARGET_MIPS64)
10157 check_insn(ctx
, ISA_MIPS3
);
10159 /* Treat as NOP. */
10162 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10166 check_insn(ctx
, ISA_MIPS3
);
10168 TCGv t0
= tcg_temp_new();
10170 gen_load_gpr(t0
, rt
);
10171 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
/* MFHC0/MTHC0: access the high half of extended CP0 registers */
10180 /* Treat as NOP. */
10183 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10189 TCGv t0
= tcg_temp_new();
10190 gen_load_gpr(t0
, rt
);
10191 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10197 check_cp0_enabled(ctx
);
10199 /* Treat as NOP. */
/* MFTR/MTTR: bit 5 of the opcode is u, bit 4 is h, low 3 bits sel */
10202 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10203 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10207 check_cp0_enabled(ctx
);
10208 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10209 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
/* TLB ops: each checks that the CPU model installed the helper */
10214 if (!env
->tlb
->helper_tlbwi
) {
10217 gen_helper_tlbwi(cpu_env
);
10221 if (ctx
->ie
>= 2) {
10222 if (!env
->tlb
->helper_tlbinv
) {
10225 gen_helper_tlbinv(cpu_env
);
10226 } /* treat as nop if TLBINV not supported */
10230 if (ctx
->ie
>= 2) {
10231 if (!env
->tlb
->helper_tlbinvf
) {
10234 gen_helper_tlbinvf(cpu_env
);
10235 } /* treat as nop if TLBINV not supported */
10239 if (!env
->tlb
->helper_tlbwr
) {
10242 gen_helper_tlbwr(cpu_env
);
10246 if (!env
->tlb
->helper_tlbp
) {
10249 gen_helper_tlbp(cpu_env
);
10253 if (!env
->tlb
->helper_tlbr
) {
10256 gen_helper_tlbr(cpu_env
);
10258 case OPC_ERET
: /* OPC_ERETNC */
/* R6 forbids ERET in a branch delay / forbidden slot */
10259 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10260 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
/* The ERETNC bit sits at a different position in MIPS16/microMIPS
 * encodings (bit 16) than in the standard encoding (bit 6) */
10263 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10264 if (ctx
->opcode
& (1 << bit_shift
)) {
10267 check_insn(ctx
, ISA_MIPS32R5
);
10268 gen_helper_eretnc(cpu_env
);
10272 check_insn(ctx
, ISA_MIPS2
);
10273 gen_helper_eret(cpu_env
);
10275 ctx
->base
.is_jmp
= DISAS_EXIT
;
10280 check_insn(ctx
, ISA_MIPS32
);
10281 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10282 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
/* DERET outside debug mode is a reserved instruction */
10285 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10287 generate_exception_end(ctx
, EXCP_RI
);
10289 gen_helper_deret(cpu_env
);
10290 ctx
->base
.is_jmp
= DISAS_EXIT
;
10295 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10296 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10297 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10300 /* If we get an exception, we want to restart at next instruction */
10301 ctx
->base
.pc_next
+= 4;
10302 save_cpu_state(ctx
, 1);
10303 ctx
->base
.pc_next
-= 4;
10304 gen_helper_wait(cpu_env
);
10305 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10310 generate_exception_end(ctx
, EXCP_RI
);
10313 (void)opn
; /* avoid a compiler warning */
10315 #endif /* !CONFIG_USER_ONLY */
/*
 * gen_compute_branch1: emit TCG for pre-R6 CP1 (FPU) condition-code
 * branches — BC1F/BC1T (and presumably their likely and ANY2/ANY4
 * variants; the case labels were dropped by the extraction).  Each arm
 * extracts one or more FCC bits from fpu_fcr31 at get_fp_bit(cc) and
 * combines them into `bcond`; hflags get MIPS_HFLAG_BL (likely) or
 * MIPS_HFLAG_BC (ordinary conditional) and the 32-bit delay slot flag.
 *
 * NOTE(review): extraction artifact — leading decimals are original file
 * line numbers fused into the text; case labels / braces are missing.
 * Code left byte-identical; comments only.
 */
10317 /* CP1 Branches (before delay slot) */
10318 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
10319 int32_t cc
, int32_t offset
)
10321 target_ulong btarget
;
10322 TCGv_i32 t0
= tcg_temp_new_i32();
/* R6 forbids these branches in a delay / forbidden slot */
10324 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10325 generate_exception_end(ctx
, EXCP_RI
);
/* cc != 0 requires the MIPS IV / MIPS32 multi-FCC facility (presumably) */
10330 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
10333 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
/* branch-on-false: bcond = !FCC[cc] */
10337 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10338 tcg_gen_not_i32(t0
, t0
);
10339 tcg_gen_andi_i32(t0
, t0
, 1);
10340 tcg_gen_extu_i32_tl(bcond
, t0
);
10343 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10344 tcg_gen_not_i32(t0
, t0
);
10345 tcg_gen_andi_i32(t0
, t0
, 1);
10346 tcg_gen_extu_i32_tl(bcond
, t0
);
/* branch-on-true: bcond = FCC[cc] */
10349 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10350 tcg_gen_andi_i32(t0
, t0
, 1);
10351 tcg_gen_extu_i32_tl(bcond
, t0
);
10354 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10355 tcg_gen_andi_i32(t0
, t0
, 1);
10356 tcg_gen_extu_i32_tl(bcond
, t0
);
/* likely variant: delay slot nullified when not taken */
10358 ctx
->hflags
|= MIPS_HFLAG_BL
;
/* ANY2F: branch if either of FCC[cc], FCC[cc+1] is false (NAND) */
10362 TCGv_i32 t1
= tcg_temp_new_i32();
10363 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10364 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10365 tcg_gen_nand_i32(t0
, t0
, t1
);
10366 tcg_temp_free_i32(t1
);
10367 tcg_gen_andi_i32(t0
, t0
, 1);
10368 tcg_gen_extu_i32_tl(bcond
, t0
);
/* ANY2T: branch if either of FCC[cc], FCC[cc+1] is true (OR) */
10373 TCGv_i32 t1
= tcg_temp_new_i32();
10374 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10375 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10376 tcg_gen_or_i32(t0
, t0
, t1
);
10377 tcg_temp_free_i32(t1
);
10378 tcg_gen_andi_i32(t0
, t0
, 1);
10379 tcg_gen_extu_i32_tl(bcond
, t0
);
/* ANY4F: branch if any of FCC[cc..cc+3] is false (AND then NAND) */
10384 TCGv_i32 t1
= tcg_temp_new_i32();
10385 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10386 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10387 tcg_gen_and_i32(t0
, t0
, t1
);
10388 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10389 tcg_gen_and_i32(t0
, t0
, t1
);
10390 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10391 tcg_gen_nand_i32(t0
, t0
, t1
);
10392 tcg_temp_free_i32(t1
);
10393 tcg_gen_andi_i32(t0
, t0
, 1);
10394 tcg_gen_extu_i32_tl(bcond
, t0
);
/* ANY4T: branch if any of FCC[cc..cc+3] is true (OR chain) */
10399 TCGv_i32 t1
= tcg_temp_new_i32();
10400 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10401 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
10402 tcg_gen_or_i32(t0
, t0
, t1
);
10403 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
10404 tcg_gen_or_i32(t0
, t0
, t1
);
10405 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
10406 tcg_gen_or_i32(t0
, t0
, t1
);
10407 tcg_temp_free_i32(t1
);
10408 tcg_gen_andi_i32(t0
, t0
, 1);
10409 tcg_gen_extu_i32_tl(bcond
, t0
);
10412 ctx
->hflags
|= MIPS_HFLAG_BC
;
10415 MIPS_INVAL("cp1 cond branch");
10416 generate_exception_end(ctx
, EXCP_RI
);
10419 ctx
->btarget
= btarget
;
10420 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10422 tcg_temp_free_i32(t0
);
/*
 * gen_compute_branch1_r6: emit TCG for R6 CP1 branches BC1EQZ/BC1NEZ —
 * branch on bit 0 of FPR ft being zero / nonzero.  `delayslot_size`
 * (2 or 4) selects the 16- or 32-bit delay-slot hflag.
 *
 * NOTE(review): extraction artifact — leading decimals are original
 * file line numbers fused into the text; some case labels / braces are
 * missing.  Code left byte-identical; comments only.
 */
10425 /* R6 CP1 Branches */
10426 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10427 int32_t ft
, int32_t offset
,
10428 int delayslot_size
)
10430 target_ulong btarget
;
10431 TCGv_i64 t0
= tcg_temp_new_i64();
/* branches are forbidden in a delay / forbidden slot */
10433 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10434 #ifdef MIPS_DEBUG_DISAS
10435 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10436 "\n", ctx
->base
.pc_next
);
10438 generate_exception_end(ctx
, EXCP_RI
);
/* condition is bit 0 of the 64-bit view of FPR ft */
10442 gen_load_fpr64(ctx
, t0
, ft
);
10443 tcg_gen_andi_i64(t0
, t0
, 1);
10445 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
/* BC1EQZ (presumably): invert the bit so bcond means "taken" */
10449 tcg_gen_xori_i64(t0
, t0
, 1);
10450 ctx
->hflags
|= MIPS_HFLAG_BC
;
/* BC1NEZ (presumably): bit already has the right sense */
10453 /* t0 already set */
10454 ctx
->hflags
|= MIPS_HFLAG_BC
;
10457 MIPS_INVAL("cp1 cond branch");
10458 generate_exception_end(ctx
, EXCP_RI
);
10462 tcg_gen_trunc_i64_tl(bcond
, t0
);
10464 ctx
->btarget
= btarget
;
10466 switch (delayslot_size
) {
10468 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10471 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10476 tcg_temp_free_i64(t0
);
10479 /* Coprocessor 1 (FPU) */
10481 #define FOP(func, fmt) (((fmt) << 21) | (func))
10484 OPC_ADD_S
= FOP(0, FMT_S
),
10485 OPC_SUB_S
= FOP(1, FMT_S
),
10486 OPC_MUL_S
= FOP(2, FMT_S
),
10487 OPC_DIV_S
= FOP(3, FMT_S
),
10488 OPC_SQRT_S
= FOP(4, FMT_S
),
10489 OPC_ABS_S
= FOP(5, FMT_S
),
10490 OPC_MOV_S
= FOP(6, FMT_S
),
10491 OPC_NEG_S
= FOP(7, FMT_S
),
10492 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10493 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10494 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10495 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10496 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10497 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10498 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10499 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10500 OPC_SEL_S
= FOP(16, FMT_S
),
10501 OPC_MOVCF_S
= FOP(17, FMT_S
),
10502 OPC_MOVZ_S
= FOP(18, FMT_S
),
10503 OPC_MOVN_S
= FOP(19, FMT_S
),
10504 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10505 OPC_RECIP_S
= FOP(21, FMT_S
),
10506 OPC_RSQRT_S
= FOP(22, FMT_S
),
10507 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10508 OPC_MADDF_S
= FOP(24, FMT_S
),
10509 OPC_MSUBF_S
= FOP(25, FMT_S
),
10510 OPC_RINT_S
= FOP(26, FMT_S
),
10511 OPC_CLASS_S
= FOP(27, FMT_S
),
10512 OPC_MIN_S
= FOP(28, FMT_S
),
10513 OPC_RECIP2_S
= FOP(28, FMT_S
),
10514 OPC_MINA_S
= FOP(29, FMT_S
),
10515 OPC_RECIP1_S
= FOP(29, FMT_S
),
10516 OPC_MAX_S
= FOP(30, FMT_S
),
10517 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10518 OPC_MAXA_S
= FOP(31, FMT_S
),
10519 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10520 OPC_CVT_D_S
= FOP(33, FMT_S
),
10521 OPC_CVT_W_S
= FOP(36, FMT_S
),
10522 OPC_CVT_L_S
= FOP(37, FMT_S
),
10523 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10524 OPC_CMP_F_S
= FOP(48, FMT_S
),
10525 OPC_CMP_UN_S
= FOP(49, FMT_S
),
10526 OPC_CMP_EQ_S
= FOP(50, FMT_S
),
10527 OPC_CMP_UEQ_S
= FOP(51, FMT_S
),
10528 OPC_CMP_OLT_S
= FOP(52, FMT_S
),
10529 OPC_CMP_ULT_S
= FOP(53, FMT_S
),
10530 OPC_CMP_OLE_S
= FOP(54, FMT_S
),
10531 OPC_CMP_ULE_S
= FOP(55, FMT_S
),
10532 OPC_CMP_SF_S
= FOP(56, FMT_S
),
10533 OPC_CMP_NGLE_S
= FOP(57, FMT_S
),
10534 OPC_CMP_SEQ_S
= FOP(58, FMT_S
),
10535 OPC_CMP_NGL_S
= FOP(59, FMT_S
),
10536 OPC_CMP_LT_S
= FOP(60, FMT_S
),
10537 OPC_CMP_NGE_S
= FOP(61, FMT_S
),
10538 OPC_CMP_LE_S
= FOP(62, FMT_S
),
10539 OPC_CMP_NGT_S
= FOP(63, FMT_S
),
10541 OPC_ADD_D
= FOP(0, FMT_D
),
10542 OPC_SUB_D
= FOP(1, FMT_D
),
10543 OPC_MUL_D
= FOP(2, FMT_D
),
10544 OPC_DIV_D
= FOP(3, FMT_D
),
10545 OPC_SQRT_D
= FOP(4, FMT_D
),
10546 OPC_ABS_D
= FOP(5, FMT_D
),
10547 OPC_MOV_D
= FOP(6, FMT_D
),
10548 OPC_NEG_D
= FOP(7, FMT_D
),
10549 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10550 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10551 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10552 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10553 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10554 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10555 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10556 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10557 OPC_SEL_D
= FOP(16, FMT_D
),
10558 OPC_MOVCF_D
= FOP(17, FMT_D
),
10559 OPC_MOVZ_D
= FOP(18, FMT_D
),
10560 OPC_MOVN_D
= FOP(19, FMT_D
),
10561 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10562 OPC_RECIP_D
= FOP(21, FMT_D
),
10563 OPC_RSQRT_D
= FOP(22, FMT_D
),
10564 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10565 OPC_MADDF_D
= FOP(24, FMT_D
),
10566 OPC_MSUBF_D
= FOP(25, FMT_D
),
10567 OPC_RINT_D
= FOP(26, FMT_D
),
10568 OPC_CLASS_D
= FOP(27, FMT_D
),
10569 OPC_MIN_D
= FOP(28, FMT_D
),
10570 OPC_RECIP2_D
= FOP(28, FMT_D
),
10571 OPC_MINA_D
= FOP(29, FMT_D
),
10572 OPC_RECIP1_D
= FOP(29, FMT_D
),
10573 OPC_MAX_D
= FOP(30, FMT_D
),
10574 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10575 OPC_MAXA_D
= FOP(31, FMT_D
),
10576 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10577 OPC_CVT_S_D
= FOP(32, FMT_D
),
10578 OPC_CVT_W_D
= FOP(36, FMT_D
),
10579 OPC_CVT_L_D
= FOP(37, FMT_D
),
10580 OPC_CMP_F_D
= FOP(48, FMT_D
),
10581 OPC_CMP_UN_D
= FOP(49, FMT_D
),
10582 OPC_CMP_EQ_D
= FOP(50, FMT_D
),
10583 OPC_CMP_UEQ_D
= FOP(51, FMT_D
),
10584 OPC_CMP_OLT_D
= FOP(52, FMT_D
),
10585 OPC_CMP_ULT_D
= FOP(53, FMT_D
),
10586 OPC_CMP_OLE_D
= FOP(54, FMT_D
),
10587 OPC_CMP_ULE_D
= FOP(55, FMT_D
),
10588 OPC_CMP_SF_D
= FOP(56, FMT_D
),
10589 OPC_CMP_NGLE_D
= FOP(57, FMT_D
),
10590 OPC_CMP_SEQ_D
= FOP(58, FMT_D
),
10591 OPC_CMP_NGL_D
= FOP(59, FMT_D
),
10592 OPC_CMP_LT_D
= FOP(60, FMT_D
),
10593 OPC_CMP_NGE_D
= FOP(61, FMT_D
),
10594 OPC_CMP_LE_D
= FOP(62, FMT_D
),
10595 OPC_CMP_NGT_D
= FOP(63, FMT_D
),
10597 OPC_CVT_S_W
= FOP(32, FMT_W
),
10598 OPC_CVT_D_W
= FOP(33, FMT_W
),
10599 OPC_CVT_S_L
= FOP(32, FMT_L
),
10600 OPC_CVT_D_L
= FOP(33, FMT_L
),
10601 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10603 OPC_ADD_PS
= FOP(0, FMT_PS
),
10604 OPC_SUB_PS
= FOP(1, FMT_PS
),
10605 OPC_MUL_PS
= FOP(2, FMT_PS
),
10606 OPC_DIV_PS
= FOP(3, FMT_PS
),
10607 OPC_ABS_PS
= FOP(5, FMT_PS
),
10608 OPC_MOV_PS
= FOP(6, FMT_PS
),
10609 OPC_NEG_PS
= FOP(7, FMT_PS
),
10610 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10611 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10612 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10613 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10614 OPC_MULR_PS
= FOP(26, FMT_PS
),
10615 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10616 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10617 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10618 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10620 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10621 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10622 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10623 OPC_PLL_PS
= FOP(44, FMT_PS
),
10624 OPC_PLU_PS
= FOP(45, FMT_PS
),
10625 OPC_PUL_PS
= FOP(46, FMT_PS
),
10626 OPC_PUU_PS
= FOP(47, FMT_PS
),
10627 OPC_CMP_F_PS
= FOP(48, FMT_PS
),
10628 OPC_CMP_UN_PS
= FOP(49, FMT_PS
),
10629 OPC_CMP_EQ_PS
= FOP(50, FMT_PS
),
10630 OPC_CMP_UEQ_PS
= FOP(51, FMT_PS
),
10631 OPC_CMP_OLT_PS
= FOP(52, FMT_PS
),
10632 OPC_CMP_ULT_PS
= FOP(53, FMT_PS
),
10633 OPC_CMP_OLE_PS
= FOP(54, FMT_PS
),
10634 OPC_CMP_ULE_PS
= FOP(55, FMT_PS
),
10635 OPC_CMP_SF_PS
= FOP(56, FMT_PS
),
10636 OPC_CMP_NGLE_PS
= FOP(57, FMT_PS
),
10637 OPC_CMP_SEQ_PS
= FOP(58, FMT_PS
),
10638 OPC_CMP_NGL_PS
= FOP(59, FMT_PS
),
10639 OPC_CMP_LT_PS
= FOP(60, FMT_PS
),
10640 OPC_CMP_NGE_PS
= FOP(61, FMT_PS
),
10641 OPC_CMP_LE_PS
= FOP(62, FMT_PS
),
10642 OPC_CMP_NGT_PS
= FOP(63, FMT_PS
),
10646 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10647 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10648 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10649 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10650 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10651 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10652 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10653 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10654 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10655 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10656 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10657 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10658 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10659 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10660 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10661 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10662 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10663 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10664 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10665 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10666 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10667 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10669 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10670 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10671 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10672 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10673 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10674 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10675 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10676 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10677 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10678 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10679 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10680 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10681 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10682 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10683 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10684 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10685 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10686 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10687 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10688 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10689 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10690 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
/*
 * gen_cp1: emit TCG ops moving data between a general-purpose register
 * (rt) and a coprocessor 1 (FPU) register or control register (fs),
 * dispatching on the CP1 move opcode in 'opc'.
 *
 * NOTE(review): this extract is lossy -- the switch statement, case
 * labels, braces and break statements of the original are missing
 * (the embedded line numbers skip). The comments below map each
 * fragment to the opcode it presumably implements (MFC1/MTC1,
 * CFC1/CTC1, DMFC1/DMTC1, MFHC1/MTHC1); confirm against full source.
 */
10693 static void gen_cp1(DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10695 TCGv t0
= tcg_temp_new();
/* presumably MFC1: read 32-bit FPR[fs], sign-extend into GPR[rt]. */
10700 TCGv_i32 fp0
= tcg_temp_new_i32();
10702 gen_load_fpr32(ctx
, fp0
, fs
);
10703 tcg_gen_ext_i32_tl(t0
, fp0
);
10704 tcg_temp_free_i32(fp0
);
10706 gen_store_gpr(t0
, rt
);
/* presumably MTC1: truncate GPR[rt] to 32 bits, store into FPR[fs]. */
10709 gen_load_gpr(t0
, rt
);
10711 TCGv_i32 fp0
= tcg_temp_new_i32();
10713 tcg_gen_trunc_tl_i32(fp0
, t0
);
10714 gen_store_fpr32(ctx
, fp0
, fs
);
10715 tcg_temp_free_i32(fp0
);
/* presumably CFC1: read FPU control register fs into GPR[rt] via helper. */
10719 gen_helper_1e0i(cfc1
, t0
, fs
);
10720 gen_store_gpr(t0
, rt
);
/* presumably CTC1: write GPR[rt] into FPU control register fs via helper. */
10723 gen_load_gpr(t0
, rt
);
10724 save_cpu_state(ctx
, 0);
10726 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10728 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10729 tcg_temp_free_i32(fs_tmp
);
10731 /* Stop translation as we may have changed hflags */
10732 ctx
->base
.is_jmp
= DISAS_STOP
;
/* 64-bit moves (presumably DMFC1 / DMTC1) are MIPS64-only. */
10734 #if defined(TARGET_MIPS64)
10736 gen_load_fpr64(ctx
, t0
, fs
);
10737 gen_store_gpr(t0
, rt
);
10740 gen_load_gpr(t0
, rt
);
10741 gen_store_fpr64(ctx
, t0
, fs
);
/* presumably MFHC1: high 32 bits of FPR[fs], sign-extended into GPR[rt]. */
10746 TCGv_i32 fp0
= tcg_temp_new_i32();
10748 gen_load_fpr32h(ctx
, fp0
, fs
);
10749 tcg_gen_ext_i32_tl(t0
, fp0
);
10750 tcg_temp_free_i32(fp0
);
10752 gen_store_gpr(t0
, rt
);
/* presumably MTHC1: low 32 bits of GPR[rt] into high half of FPR[fs]. */
10755 gen_load_gpr(t0
, rt
);
10757 TCGv_i32 fp0
= tcg_temp_new_i32();
10759 tcg_gen_trunc_tl_i32(fp0
, t0
);
10760 gen_store_fpr32h(ctx
, fp0
, fs
);
10761 tcg_temp_free_i32(fp0
);
/* Unrecognised CP1 move opcode: raise a Reserved Instruction exception. */
10765 MIPS_INVAL("cp1 move");
10766 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_movci: conditional GPR move on an FPU condition code
 * (presumably MOVF/MOVT: rd = rs if FCC[cc] == tf).
 * Tests bit get_fp_bit(cc) of fpu_fcr31 and branches around the move
 * when the condition is not met.
 *
 * NOTE(review): extract is lossy -- the rd==0 guard, if/else around the
 * cond assignments, and label/brace lines are missing. Confirm against
 * full source.
 */
10774 static void gen_movci(DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
/* Destination is the zero register: nothing to do. */
10781 /* Treat as NOP. */
/* tf selects the sense of the test: branch (skip move) on EQ or NE. */
10786 cond
= TCG_COND_EQ
;
10788 cond
= TCG_COND_NE
;
10791 l1
= gen_new_label();
10792 t0
= tcg_temp_new_i32();
/* Isolate the FCC[cc] bit of FCR31 and branch past the move if it
 * does not match the requested value. */
10793 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10794 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10795 tcg_temp_free_i32(t0
);
/* rs == 0 reads as constant zero; otherwise copy GPR[rs] to GPR[rd]. */
10797 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10799 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
/*
 * gen_movcf_s: conditional single-precision FPR move on an FPU
 * condition code (presumably MOVF.S/MOVT.S: fd = fs if FCC[cc] == tf).
 *
 * NOTE(review): extract is lossy -- the 'tf' parameter, if/else around
 * the cond assignments, and the label placement are missing from view.
 */
10804 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10808 TCGv_i32 t0
= tcg_temp_new_i32();
10809 TCGLabel
*l1
= gen_new_label();
/* Select the branch sense (skip the move) from the tf flag. */
10812 cond
= TCG_COND_EQ
;
10814 cond
= TCG_COND_NE
;
/* Test bit FCC[cc] of FCR31; branch over the move if not taken. */
10817 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10818 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
/* t0 is reused as the 32-bit data temporary for the move itself. */
10819 gen_load_fpr32(ctx
, t0
, fs
);
10820 gen_store_fpr32(ctx
, t0
, fd
);
10822 tcg_temp_free_i32(t0
);
/*
 * gen_movcf_d: conditional double-precision FPR move on an FPU
 * condition code (presumably MOVF.D/MOVT.D: fd = fs if FCC[cc] == tf).
 *
 * NOTE(review): extract is lossy -- the 'tf' parameter, the fp0
 * declaration, if/else structure and label placement are missing.
 */
10825 static inline void gen_movcf_d(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10829 TCGv_i32 t0
= tcg_temp_new_i32();
10831 TCGLabel
*l1
= gen_new_label();
/* Select the branch sense (skip the move) from the tf flag. */
10834 cond
= TCG_COND_EQ
;
10836 cond
= TCG_COND_NE
;
/* Test bit FCC[cc] of FCR31; branch over the move if not taken. */
10839 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10840 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10841 tcg_temp_free_i32(t0
);
/* Condition met: copy the full 64-bit register fs -> fd. */
10842 fp0
= tcg_temp_new_i64();
10843 gen_load_fpr64(ctx
, fp0
, fs
);
10844 gen_store_fpr64(ctx
, fp0
, fd
);
10845 tcg_temp_free_i64(fp0
);
/*
 * gen_movcf_ps: conditional paired-single FPR move on FPU condition
 * codes (presumably MOVF.PS/MOVT.PS). The low half is controlled by
 * FCC[cc] and the high half independently by FCC[cc+1], each with its
 * own skip label (l1 for the low word, l2 for the high word).
 *
 * NOTE(review): extract is lossy -- the 'tf' parameter, if/else
 * structure and label placement are missing from view.
 */
10849 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10853 TCGv_i32 t0
= tcg_temp_new_i32();
10854 TCGLabel
*l1
= gen_new_label();
10855 TCGLabel
*l2
= gen_new_label();
/* Select the branch sense (skip the move) from the tf flag. */
10858 cond
= TCG_COND_EQ
;
10860 cond
= TCG_COND_NE
;
/* Low word: test FCC[cc]; move low 32 bits fs -> fd if taken. */
10863 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10864 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10865 gen_load_fpr32(ctx
, t0
, fs
);
10866 gen_store_fpr32(ctx
, t0
, fd
);
/* High word: test FCC[cc+1]; move high 32 bits fs -> fd if taken. */
10869 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10870 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10871 gen_load_fpr32h(ctx
, t0
, fs
);
10872 gen_store_fpr32h(ctx
, t0
, fd
);
10873 tcg_temp_free_i32(t0
);
/*
 * gen_sel_s: MIPS R6 single-precision select family, branchless via
 * tcg_gen_movcond_i32. Dispatches on op1 (presumably OPC_SEL_S,
 * OPC_SELEQZ_S, OPC_SELNEZ_S); the result is built in fp0 and stored
 * to fd.
 *   fp0 = fd (select control for SEL), fp1 = ft, fp2 = fs.
 *
 * NOTE(review): extract is lossy -- the switch/case labels and the
 * 'fs' parameter line are missing; the mapping of each movcond
 * fragment to its opcode is inferred. Confirm against full source.
 */
10877 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10880 TCGv_i32 t1
= tcg_const_i32(0);
10881 TCGv_i32 fp0
= tcg_temp_new_i32();
10882 TCGv_i32 fp1
= tcg_temp_new_i32();
10883 TCGv_i32 fp2
= tcg_temp_new_i32();
10884 gen_load_fpr32(ctx
, fp0
, fd
);
10885 gen_load_fpr32(ctx
, fp1
, ft
);
10886 gen_load_fpr32(ctx
, fp2
, fs
);
/* presumably SEL.S: bit 0 of fd chooses ft (set) or fs (clear). */
10890 tcg_gen_andi_i32(fp0
, fp0
, 1);
10891 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
/* presumably SELEQZ.S: fd = (ft & 1) == 0 ? fs : 0. */
10894 tcg_gen_andi_i32(fp1
, fp1
, 1);
10895 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
/* presumably SELNEZ.S: fd = (ft & 1) != 0 ? fs : 0. */
10898 tcg_gen_andi_i32(fp1
, fp1
, 1);
10899 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
/* Unexpected opcode: raise Reserved Instruction. */
10902 MIPS_INVAL("gen_sel_s");
10903 generate_exception_end(ctx
, EXCP_RI
);
/* Write back the selected value and release all temporaries. */
10907 gen_store_fpr32(ctx
, fp0
, fd
);
10908 tcg_temp_free_i32(fp2
);
10909 tcg_temp_free_i32(fp1
);
10910 tcg_temp_free_i32(fp0
);
10911 tcg_temp_free_i32(t1
);
/*
 * gen_sel_d: MIPS R6 double-precision select family; 64-bit analogue
 * of gen_sel_s, branchless via tcg_gen_movcond_i64. Dispatches on op1
 * (presumably OPC_SEL_D, OPC_SELEQZ_D, OPC_SELNEZ_D); the result is
 * built in fp0 and stored to fd.
 *   fp0 = fd (select control for SEL), fp1 = ft, fp2 = fs.
 *
 * NOTE(review): extract is lossy -- the switch/case labels and the
 * 'fs' parameter line are missing; the mapping of each movcond
 * fragment to its opcode is inferred. Confirm against full source.
 */
10914 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10917 TCGv_i64 t1
= tcg_const_i64(0);
10918 TCGv_i64 fp0
= tcg_temp_new_i64();
10919 TCGv_i64 fp1
= tcg_temp_new_i64();
10920 TCGv_i64 fp2
= tcg_temp_new_i64();
10921 gen_load_fpr64(ctx
, fp0
, fd
);
10922 gen_load_fpr64(ctx
, fp1
, ft
);
10923 gen_load_fpr64(ctx
, fp2
, fs
);
/* presumably SEL.D: bit 0 of fd chooses ft (set) or fs (clear). */
10927 tcg_gen_andi_i64(fp0
, fp0
, 1);
10928 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
/* presumably SELEQZ.D: fd = (ft & 1) == 0 ? fs : 0. */
10931 tcg_gen_andi_i64(fp1
, fp1
, 1);
10932 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
/* presumably SELNEZ.D: fd = (ft & 1) != 0 ? fs : 0. */
10935 tcg_gen_andi_i64(fp1
, fp1
, 1);
10936 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
/* Unexpected opcode: raise Reserved Instruction. */
10939 MIPS_INVAL("gen_sel_d");
10940 generate_exception_end(ctx
, EXCP_RI
);
/* Write back the selected value and release all temporaries. */
10944 gen_store_fpr64(ctx
, fp0
, fd
);
10945 tcg_temp_free_i64(fp2
);
10946 tcg_temp_free_i64(fp1
);
10947 tcg_temp_free_i64(fp0
);
10948 tcg_temp_free_i64(t1
);
10951 static void gen_farith(DisasContext
*ctx
, enum fopcode op1
,
10952 int ft
, int fs
, int fd
, int cc
)
10954 uint32_t func
= ctx
->opcode
& 0x3f;
10958 TCGv_i32 fp0
= tcg_temp_new_i32();
10959 TCGv_i32 fp1
= tcg_temp_new_i32();
10961 gen_load_fpr32(ctx
, fp0
, fs
);
10962 gen_load_fpr32(ctx
, fp1
, ft
);
10963 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
10964 tcg_temp_free_i32(fp1
);
10965 gen_store_fpr32(ctx
, fp0
, fd
);
10966 tcg_temp_free_i32(fp0
);
10971 TCGv_i32 fp0
= tcg_temp_new_i32();
10972 TCGv_i32 fp1
= tcg_temp_new_i32();
10974 gen_load_fpr32(ctx
, fp0
, fs
);
10975 gen_load_fpr32(ctx
, fp1
, ft
);
10976 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
10977 tcg_temp_free_i32(fp1
);
10978 gen_store_fpr32(ctx
, fp0
, fd
);
10979 tcg_temp_free_i32(fp0
);
10984 TCGv_i32 fp0
= tcg_temp_new_i32();
10985 TCGv_i32 fp1
= tcg_temp_new_i32();
10987 gen_load_fpr32(ctx
, fp0
, fs
);
10988 gen_load_fpr32(ctx
, fp1
, ft
);
10989 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
10990 tcg_temp_free_i32(fp1
);
10991 gen_store_fpr32(ctx
, fp0
, fd
);
10992 tcg_temp_free_i32(fp0
);
10997 TCGv_i32 fp0
= tcg_temp_new_i32();
10998 TCGv_i32 fp1
= tcg_temp_new_i32();
11000 gen_load_fpr32(ctx
, fp0
, fs
);
11001 gen_load_fpr32(ctx
, fp1
, ft
);
11002 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
11003 tcg_temp_free_i32(fp1
);
11004 gen_store_fpr32(ctx
, fp0
, fd
);
11005 tcg_temp_free_i32(fp0
);
11010 TCGv_i32 fp0
= tcg_temp_new_i32();
11012 gen_load_fpr32(ctx
, fp0
, fs
);
11013 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
11014 gen_store_fpr32(ctx
, fp0
, fd
);
11015 tcg_temp_free_i32(fp0
);
11020 TCGv_i32 fp0
= tcg_temp_new_i32();
11022 gen_load_fpr32(ctx
, fp0
, fs
);
11023 if (ctx
->abs2008
) {
11024 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
11026 gen_helper_float_abs_s(fp0
, fp0
);
11028 gen_store_fpr32(ctx
, fp0
, fd
);
11029 tcg_temp_free_i32(fp0
);
11034 TCGv_i32 fp0
= tcg_temp_new_i32();
11036 gen_load_fpr32(ctx
, fp0
, fs
);
11037 gen_store_fpr32(ctx
, fp0
, fd
);
11038 tcg_temp_free_i32(fp0
);
11043 TCGv_i32 fp0
= tcg_temp_new_i32();
11045 gen_load_fpr32(ctx
, fp0
, fs
);
11046 if (ctx
->abs2008
) {
11047 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
11049 gen_helper_float_chs_s(fp0
, fp0
);
11051 gen_store_fpr32(ctx
, fp0
, fd
);
11052 tcg_temp_free_i32(fp0
);
11055 case OPC_ROUND_L_S
:
11056 check_cp1_64bitmode(ctx
);
11058 TCGv_i32 fp32
= tcg_temp_new_i32();
11059 TCGv_i64 fp64
= tcg_temp_new_i64();
11061 gen_load_fpr32(ctx
, fp32
, fs
);
11062 if (ctx
->nan2008
) {
11063 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
11065 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
11067 tcg_temp_free_i32(fp32
);
11068 gen_store_fpr64(ctx
, fp64
, fd
);
11069 tcg_temp_free_i64(fp64
);
11072 case OPC_TRUNC_L_S
:
11073 check_cp1_64bitmode(ctx
);
11075 TCGv_i32 fp32
= tcg_temp_new_i32();
11076 TCGv_i64 fp64
= tcg_temp_new_i64();
11078 gen_load_fpr32(ctx
, fp32
, fs
);
11079 if (ctx
->nan2008
) {
11080 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
11082 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
11084 tcg_temp_free_i32(fp32
);
11085 gen_store_fpr64(ctx
, fp64
, fd
);
11086 tcg_temp_free_i64(fp64
);
11090 check_cp1_64bitmode(ctx
);
11092 TCGv_i32 fp32
= tcg_temp_new_i32();
11093 TCGv_i64 fp64
= tcg_temp_new_i64();
11095 gen_load_fpr32(ctx
, fp32
, fs
);
11096 if (ctx
->nan2008
) {
11097 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
11099 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
11101 tcg_temp_free_i32(fp32
);
11102 gen_store_fpr64(ctx
, fp64
, fd
);
11103 tcg_temp_free_i64(fp64
);
11106 case OPC_FLOOR_L_S
:
11107 check_cp1_64bitmode(ctx
);
11109 TCGv_i32 fp32
= tcg_temp_new_i32();
11110 TCGv_i64 fp64
= tcg_temp_new_i64();
11112 gen_load_fpr32(ctx
, fp32
, fs
);
11113 if (ctx
->nan2008
) {
11114 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
11116 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
11118 tcg_temp_free_i32(fp32
);
11119 gen_store_fpr64(ctx
, fp64
, fd
);
11120 tcg_temp_free_i64(fp64
);
11123 case OPC_ROUND_W_S
:
11125 TCGv_i32 fp0
= tcg_temp_new_i32();
11127 gen_load_fpr32(ctx
, fp0
, fs
);
11128 if (ctx
->nan2008
) {
11129 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
11131 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
11133 gen_store_fpr32(ctx
, fp0
, fd
);
11134 tcg_temp_free_i32(fp0
);
11137 case OPC_TRUNC_W_S
:
11139 TCGv_i32 fp0
= tcg_temp_new_i32();
11141 gen_load_fpr32(ctx
, fp0
, fs
);
11142 if (ctx
->nan2008
) {
11143 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
11145 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11147 gen_store_fpr32(ctx
, fp0
, fd
);
11148 tcg_temp_free_i32(fp0
);
11153 TCGv_i32 fp0
= tcg_temp_new_i32();
11155 gen_load_fpr32(ctx
, fp0
, fs
);
11156 if (ctx
->nan2008
) {
11157 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11159 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11161 gen_store_fpr32(ctx
, fp0
, fd
);
11162 tcg_temp_free_i32(fp0
);
11165 case OPC_FLOOR_W_S
:
11167 TCGv_i32 fp0
= tcg_temp_new_i32();
11169 gen_load_fpr32(ctx
, fp0
, fs
);
11170 if (ctx
->nan2008
) {
11171 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11173 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11175 gen_store_fpr32(ctx
, fp0
, fd
);
11176 tcg_temp_free_i32(fp0
);
11180 check_insn(ctx
, ISA_MIPS32R6
);
11181 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11184 check_insn(ctx
, ISA_MIPS32R6
);
11185 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11188 check_insn(ctx
, ISA_MIPS32R6
);
11189 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11192 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11193 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11196 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11198 TCGLabel
*l1
= gen_new_label();
11202 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11204 fp0
= tcg_temp_new_i32();
11205 gen_load_fpr32(ctx
, fp0
, fs
);
11206 gen_store_fpr32(ctx
, fp0
, fd
);
11207 tcg_temp_free_i32(fp0
);
11212 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11214 TCGLabel
*l1
= gen_new_label();
11218 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11219 fp0
= tcg_temp_new_i32();
11220 gen_load_fpr32(ctx
, fp0
, fs
);
11221 gen_store_fpr32(ctx
, fp0
, fd
);
11222 tcg_temp_free_i32(fp0
);
11229 TCGv_i32 fp0
= tcg_temp_new_i32();
11231 gen_load_fpr32(ctx
, fp0
, fs
);
11232 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11233 gen_store_fpr32(ctx
, fp0
, fd
);
11234 tcg_temp_free_i32(fp0
);
11239 TCGv_i32 fp0
= tcg_temp_new_i32();
11241 gen_load_fpr32(ctx
, fp0
, fs
);
11242 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11243 gen_store_fpr32(ctx
, fp0
, fd
);
11244 tcg_temp_free_i32(fp0
);
11248 check_insn(ctx
, ISA_MIPS32R6
);
11250 TCGv_i32 fp0
= tcg_temp_new_i32();
11251 TCGv_i32 fp1
= tcg_temp_new_i32();
11252 TCGv_i32 fp2
= tcg_temp_new_i32();
11253 gen_load_fpr32(ctx
, fp0
, fs
);
11254 gen_load_fpr32(ctx
, fp1
, ft
);
11255 gen_load_fpr32(ctx
, fp2
, fd
);
11256 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11257 gen_store_fpr32(ctx
, fp2
, fd
);
11258 tcg_temp_free_i32(fp2
);
11259 tcg_temp_free_i32(fp1
);
11260 tcg_temp_free_i32(fp0
);
11264 check_insn(ctx
, ISA_MIPS32R6
);
11266 TCGv_i32 fp0
= tcg_temp_new_i32();
11267 TCGv_i32 fp1
= tcg_temp_new_i32();
11268 TCGv_i32 fp2
= tcg_temp_new_i32();
11269 gen_load_fpr32(ctx
, fp0
, fs
);
11270 gen_load_fpr32(ctx
, fp1
, ft
);
11271 gen_load_fpr32(ctx
, fp2
, fd
);
11272 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11273 gen_store_fpr32(ctx
, fp2
, fd
);
11274 tcg_temp_free_i32(fp2
);
11275 tcg_temp_free_i32(fp1
);
11276 tcg_temp_free_i32(fp0
);
11280 check_insn(ctx
, ISA_MIPS32R6
);
11282 TCGv_i32 fp0
= tcg_temp_new_i32();
11283 gen_load_fpr32(ctx
, fp0
, fs
);
11284 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11285 gen_store_fpr32(ctx
, fp0
, fd
);
11286 tcg_temp_free_i32(fp0
);
11290 check_insn(ctx
, ISA_MIPS32R6
);
11292 TCGv_i32 fp0
= tcg_temp_new_i32();
11293 gen_load_fpr32(ctx
, fp0
, fs
);
11294 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11295 gen_store_fpr32(ctx
, fp0
, fd
);
11296 tcg_temp_free_i32(fp0
);
11299 case OPC_MIN_S
: /* OPC_RECIP2_S */
11300 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11302 TCGv_i32 fp0
= tcg_temp_new_i32();
11303 TCGv_i32 fp1
= tcg_temp_new_i32();
11304 TCGv_i32 fp2
= tcg_temp_new_i32();
11305 gen_load_fpr32(ctx
, fp0
, fs
);
11306 gen_load_fpr32(ctx
, fp1
, ft
);
11307 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11308 gen_store_fpr32(ctx
, fp2
, fd
);
11309 tcg_temp_free_i32(fp2
);
11310 tcg_temp_free_i32(fp1
);
11311 tcg_temp_free_i32(fp0
);
11314 check_cp1_64bitmode(ctx
);
11316 TCGv_i32 fp0
= tcg_temp_new_i32();
11317 TCGv_i32 fp1
= tcg_temp_new_i32();
11319 gen_load_fpr32(ctx
, fp0
, fs
);
11320 gen_load_fpr32(ctx
, fp1
, ft
);
11321 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11322 tcg_temp_free_i32(fp1
);
11323 gen_store_fpr32(ctx
, fp0
, fd
);
11324 tcg_temp_free_i32(fp0
);
11328 case OPC_MINA_S
: /* OPC_RECIP1_S */
11329 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11331 TCGv_i32 fp0
= tcg_temp_new_i32();
11332 TCGv_i32 fp1
= tcg_temp_new_i32();
11333 TCGv_i32 fp2
= tcg_temp_new_i32();
11334 gen_load_fpr32(ctx
, fp0
, fs
);
11335 gen_load_fpr32(ctx
, fp1
, ft
);
11336 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11337 gen_store_fpr32(ctx
, fp2
, fd
);
11338 tcg_temp_free_i32(fp2
);
11339 tcg_temp_free_i32(fp1
);
11340 tcg_temp_free_i32(fp0
);
11343 check_cp1_64bitmode(ctx
);
11345 TCGv_i32 fp0
= tcg_temp_new_i32();
11347 gen_load_fpr32(ctx
, fp0
, fs
);
11348 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11349 gen_store_fpr32(ctx
, fp0
, fd
);
11350 tcg_temp_free_i32(fp0
);
11354 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11355 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11357 TCGv_i32 fp0
= tcg_temp_new_i32();
11358 TCGv_i32 fp1
= tcg_temp_new_i32();
11359 gen_load_fpr32(ctx
, fp0
, fs
);
11360 gen_load_fpr32(ctx
, fp1
, ft
);
11361 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11362 gen_store_fpr32(ctx
, fp1
, fd
);
11363 tcg_temp_free_i32(fp1
);
11364 tcg_temp_free_i32(fp0
);
11367 check_cp1_64bitmode(ctx
);
11369 TCGv_i32 fp0
= tcg_temp_new_i32();
11371 gen_load_fpr32(ctx
, fp0
, fs
);
11372 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11373 gen_store_fpr32(ctx
, fp0
, fd
);
11374 tcg_temp_free_i32(fp0
);
11378 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11379 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11381 TCGv_i32 fp0
= tcg_temp_new_i32();
11382 TCGv_i32 fp1
= tcg_temp_new_i32();
11383 gen_load_fpr32(ctx
, fp0
, fs
);
11384 gen_load_fpr32(ctx
, fp1
, ft
);
11385 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11386 gen_store_fpr32(ctx
, fp1
, fd
);
11387 tcg_temp_free_i32(fp1
);
11388 tcg_temp_free_i32(fp0
);
11391 check_cp1_64bitmode(ctx
);
11393 TCGv_i32 fp0
= tcg_temp_new_i32();
11394 TCGv_i32 fp1
= tcg_temp_new_i32();
11396 gen_load_fpr32(ctx
, fp0
, fs
);
11397 gen_load_fpr32(ctx
, fp1
, ft
);
11398 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11399 tcg_temp_free_i32(fp1
);
11400 gen_store_fpr32(ctx
, fp0
, fd
);
11401 tcg_temp_free_i32(fp0
);
11406 check_cp1_registers(ctx
, fd
);
11408 TCGv_i32 fp32
= tcg_temp_new_i32();
11409 TCGv_i64 fp64
= tcg_temp_new_i64();
11411 gen_load_fpr32(ctx
, fp32
, fs
);
11412 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11413 tcg_temp_free_i32(fp32
);
11414 gen_store_fpr64(ctx
, fp64
, fd
);
11415 tcg_temp_free_i64(fp64
);
11420 TCGv_i32 fp0
= tcg_temp_new_i32();
11422 gen_load_fpr32(ctx
, fp0
, fs
);
11423 if (ctx
->nan2008
) {
11424 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11426 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11428 gen_store_fpr32(ctx
, fp0
, fd
);
11429 tcg_temp_free_i32(fp0
);
11433 check_cp1_64bitmode(ctx
);
11435 TCGv_i32 fp32
= tcg_temp_new_i32();
11436 TCGv_i64 fp64
= tcg_temp_new_i64();
11438 gen_load_fpr32(ctx
, fp32
, fs
);
11439 if (ctx
->nan2008
) {
11440 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11442 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11444 tcg_temp_free_i32(fp32
);
11445 gen_store_fpr64(ctx
, fp64
, fd
);
11446 tcg_temp_free_i64(fp64
);
11452 TCGv_i64 fp64
= tcg_temp_new_i64();
11453 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11454 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11456 gen_load_fpr32(ctx
, fp32_0
, fs
);
11457 gen_load_fpr32(ctx
, fp32_1
, ft
);
11458 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11459 tcg_temp_free_i32(fp32_1
);
11460 tcg_temp_free_i32(fp32_0
);
11461 gen_store_fpr64(ctx
, fp64
, fd
);
11462 tcg_temp_free_i64(fp64
);
11468 case OPC_CMP_UEQ_S
:
11469 case OPC_CMP_OLT_S
:
11470 case OPC_CMP_ULT_S
:
11471 case OPC_CMP_OLE_S
:
11472 case OPC_CMP_ULE_S
:
11474 case OPC_CMP_NGLE_S
:
11475 case OPC_CMP_SEQ_S
:
11476 case OPC_CMP_NGL_S
:
11478 case OPC_CMP_NGE_S
:
11480 case OPC_CMP_NGT_S
:
11481 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11482 if (ctx
->opcode
& (1 << 6)) {
11483 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11485 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11489 check_cp1_registers(ctx
, fs
| ft
| fd
);
11491 TCGv_i64 fp0
= tcg_temp_new_i64();
11492 TCGv_i64 fp1
= tcg_temp_new_i64();
11494 gen_load_fpr64(ctx
, fp0
, fs
);
11495 gen_load_fpr64(ctx
, fp1
, ft
);
11496 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11497 tcg_temp_free_i64(fp1
);
11498 gen_store_fpr64(ctx
, fp0
, fd
);
11499 tcg_temp_free_i64(fp0
);
11503 check_cp1_registers(ctx
, fs
| ft
| fd
);
11505 TCGv_i64 fp0
= tcg_temp_new_i64();
11506 TCGv_i64 fp1
= tcg_temp_new_i64();
11508 gen_load_fpr64(ctx
, fp0
, fs
);
11509 gen_load_fpr64(ctx
, fp1
, ft
);
11510 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11511 tcg_temp_free_i64(fp1
);
11512 gen_store_fpr64(ctx
, fp0
, fd
);
11513 tcg_temp_free_i64(fp0
);
11517 check_cp1_registers(ctx
, fs
| ft
| fd
);
11519 TCGv_i64 fp0
= tcg_temp_new_i64();
11520 TCGv_i64 fp1
= tcg_temp_new_i64();
11522 gen_load_fpr64(ctx
, fp0
, fs
);
11523 gen_load_fpr64(ctx
, fp1
, ft
);
11524 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11525 tcg_temp_free_i64(fp1
);
11526 gen_store_fpr64(ctx
, fp0
, fd
);
11527 tcg_temp_free_i64(fp0
);
11531 check_cp1_registers(ctx
, fs
| ft
| fd
);
11533 TCGv_i64 fp0
= tcg_temp_new_i64();
11534 TCGv_i64 fp1
= tcg_temp_new_i64();
11536 gen_load_fpr64(ctx
, fp0
, fs
);
11537 gen_load_fpr64(ctx
, fp1
, ft
);
11538 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11539 tcg_temp_free_i64(fp1
);
11540 gen_store_fpr64(ctx
, fp0
, fd
);
11541 tcg_temp_free_i64(fp0
);
11545 check_cp1_registers(ctx
, fs
| fd
);
11547 TCGv_i64 fp0
= tcg_temp_new_i64();
11549 gen_load_fpr64(ctx
, fp0
, fs
);
11550 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11551 gen_store_fpr64(ctx
, fp0
, fd
);
11552 tcg_temp_free_i64(fp0
);
11556 check_cp1_registers(ctx
, fs
| fd
);
11558 TCGv_i64 fp0
= tcg_temp_new_i64();
11560 gen_load_fpr64(ctx
, fp0
, fs
);
11561 if (ctx
->abs2008
) {
11562 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11564 gen_helper_float_abs_d(fp0
, fp0
);
11566 gen_store_fpr64(ctx
, fp0
, fd
);
11567 tcg_temp_free_i64(fp0
);
11571 check_cp1_registers(ctx
, fs
| fd
);
11573 TCGv_i64 fp0
= tcg_temp_new_i64();
11575 gen_load_fpr64(ctx
, fp0
, fs
);
11576 gen_store_fpr64(ctx
, fp0
, fd
);
11577 tcg_temp_free_i64(fp0
);
11581 check_cp1_registers(ctx
, fs
| fd
);
11583 TCGv_i64 fp0
= tcg_temp_new_i64();
11585 gen_load_fpr64(ctx
, fp0
, fs
);
11586 if (ctx
->abs2008
) {
11587 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11589 gen_helper_float_chs_d(fp0
, fp0
);
11591 gen_store_fpr64(ctx
, fp0
, fd
);
11592 tcg_temp_free_i64(fp0
);
11595 case OPC_ROUND_L_D
:
11596 check_cp1_64bitmode(ctx
);
11598 TCGv_i64 fp0
= tcg_temp_new_i64();
11600 gen_load_fpr64(ctx
, fp0
, fs
);
11601 if (ctx
->nan2008
) {
11602 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11604 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11606 gen_store_fpr64(ctx
, fp0
, fd
);
11607 tcg_temp_free_i64(fp0
);
11610 case OPC_TRUNC_L_D
:
11611 check_cp1_64bitmode(ctx
);
11613 TCGv_i64 fp0
= tcg_temp_new_i64();
11615 gen_load_fpr64(ctx
, fp0
, fs
);
11616 if (ctx
->nan2008
) {
11617 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11619 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11621 gen_store_fpr64(ctx
, fp0
, fd
);
11622 tcg_temp_free_i64(fp0
);
11626 check_cp1_64bitmode(ctx
);
11628 TCGv_i64 fp0
= tcg_temp_new_i64();
11630 gen_load_fpr64(ctx
, fp0
, fs
);
11631 if (ctx
->nan2008
) {
11632 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11634 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11636 gen_store_fpr64(ctx
, fp0
, fd
);
11637 tcg_temp_free_i64(fp0
);
11640 case OPC_FLOOR_L_D
:
11641 check_cp1_64bitmode(ctx
);
11643 TCGv_i64 fp0
= tcg_temp_new_i64();
11645 gen_load_fpr64(ctx
, fp0
, fs
);
11646 if (ctx
->nan2008
) {
11647 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11649 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11651 gen_store_fpr64(ctx
, fp0
, fd
);
11652 tcg_temp_free_i64(fp0
);
11655 case OPC_ROUND_W_D
:
11656 check_cp1_registers(ctx
, fs
);
11658 TCGv_i32 fp32
= tcg_temp_new_i32();
11659 TCGv_i64 fp64
= tcg_temp_new_i64();
11661 gen_load_fpr64(ctx
, fp64
, fs
);
11662 if (ctx
->nan2008
) {
11663 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11665 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11667 tcg_temp_free_i64(fp64
);
11668 gen_store_fpr32(ctx
, fp32
, fd
);
11669 tcg_temp_free_i32(fp32
);
11672 case OPC_TRUNC_W_D
:
11673 check_cp1_registers(ctx
, fs
);
11675 TCGv_i32 fp32
= tcg_temp_new_i32();
11676 TCGv_i64 fp64
= tcg_temp_new_i64();
11678 gen_load_fpr64(ctx
, fp64
, fs
);
11679 if (ctx
->nan2008
) {
11680 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11682 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11684 tcg_temp_free_i64(fp64
);
11685 gen_store_fpr32(ctx
, fp32
, fd
);
11686 tcg_temp_free_i32(fp32
);
11690 check_cp1_registers(ctx
, fs
);
11692 TCGv_i32 fp32
= tcg_temp_new_i32();
11693 TCGv_i64 fp64
= tcg_temp_new_i64();
11695 gen_load_fpr64(ctx
, fp64
, fs
);
11696 if (ctx
->nan2008
) {
11697 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11699 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11701 tcg_temp_free_i64(fp64
);
11702 gen_store_fpr32(ctx
, fp32
, fd
);
11703 tcg_temp_free_i32(fp32
);
11706 case OPC_FLOOR_W_D
:
11707 check_cp1_registers(ctx
, fs
);
11709 TCGv_i32 fp32
= tcg_temp_new_i32();
11710 TCGv_i64 fp64
= tcg_temp_new_i64();
11712 gen_load_fpr64(ctx
, fp64
, fs
);
11713 if (ctx
->nan2008
) {
11714 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11716 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11718 tcg_temp_free_i64(fp64
);
11719 gen_store_fpr32(ctx
, fp32
, fd
);
11720 tcg_temp_free_i32(fp32
);
11724 check_insn(ctx
, ISA_MIPS32R6
);
11725 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11728 check_insn(ctx
, ISA_MIPS32R6
);
11729 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11732 check_insn(ctx
, ISA_MIPS32R6
);
11733 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11736 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11737 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11740 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11742 TCGLabel
*l1
= gen_new_label();
11746 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11748 fp0
= tcg_temp_new_i64();
11749 gen_load_fpr64(ctx
, fp0
, fs
);
11750 gen_store_fpr64(ctx
, fp0
, fd
);
11751 tcg_temp_free_i64(fp0
);
11756 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11758 TCGLabel
*l1
= gen_new_label();
11762 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11763 fp0
= tcg_temp_new_i64();
11764 gen_load_fpr64(ctx
, fp0
, fs
);
11765 gen_store_fpr64(ctx
, fp0
, fd
);
11766 tcg_temp_free_i64(fp0
);
11772 check_cp1_registers(ctx
, fs
| fd
);
11774 TCGv_i64 fp0
= tcg_temp_new_i64();
11776 gen_load_fpr64(ctx
, fp0
, fs
);
11777 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11778 gen_store_fpr64(ctx
, fp0
, fd
);
11779 tcg_temp_free_i64(fp0
);
11783 check_cp1_registers(ctx
, fs
| fd
);
11785 TCGv_i64 fp0
= tcg_temp_new_i64();
11787 gen_load_fpr64(ctx
, fp0
, fs
);
11788 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11789 gen_store_fpr64(ctx
, fp0
, fd
);
11790 tcg_temp_free_i64(fp0
);
11794 check_insn(ctx
, ISA_MIPS32R6
);
11796 TCGv_i64 fp0
= tcg_temp_new_i64();
11797 TCGv_i64 fp1
= tcg_temp_new_i64();
11798 TCGv_i64 fp2
= tcg_temp_new_i64();
11799 gen_load_fpr64(ctx
, fp0
, fs
);
11800 gen_load_fpr64(ctx
, fp1
, ft
);
11801 gen_load_fpr64(ctx
, fp2
, fd
);
11802 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11803 gen_store_fpr64(ctx
, fp2
, fd
);
11804 tcg_temp_free_i64(fp2
);
11805 tcg_temp_free_i64(fp1
);
11806 tcg_temp_free_i64(fp0
);
11810 check_insn(ctx
, ISA_MIPS32R6
);
11812 TCGv_i64 fp0
= tcg_temp_new_i64();
11813 TCGv_i64 fp1
= tcg_temp_new_i64();
11814 TCGv_i64 fp2
= tcg_temp_new_i64();
11815 gen_load_fpr64(ctx
, fp0
, fs
);
11816 gen_load_fpr64(ctx
, fp1
, ft
);
11817 gen_load_fpr64(ctx
, fp2
, fd
);
11818 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11819 gen_store_fpr64(ctx
, fp2
, fd
);
11820 tcg_temp_free_i64(fp2
);
11821 tcg_temp_free_i64(fp1
);
11822 tcg_temp_free_i64(fp0
);
11826 check_insn(ctx
, ISA_MIPS32R6
);
11828 TCGv_i64 fp0
= tcg_temp_new_i64();
11829 gen_load_fpr64(ctx
, fp0
, fs
);
11830 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11831 gen_store_fpr64(ctx
, fp0
, fd
);
11832 tcg_temp_free_i64(fp0
);
11836 check_insn(ctx
, ISA_MIPS32R6
);
11838 TCGv_i64 fp0
= tcg_temp_new_i64();
11839 gen_load_fpr64(ctx
, fp0
, fs
);
11840 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11841 gen_store_fpr64(ctx
, fp0
, fd
);
11842 tcg_temp_free_i64(fp0
);
11845 case OPC_MIN_D
: /* OPC_RECIP2_D */
11846 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11848 TCGv_i64 fp0
= tcg_temp_new_i64();
11849 TCGv_i64 fp1
= tcg_temp_new_i64();
11850 gen_load_fpr64(ctx
, fp0
, fs
);
11851 gen_load_fpr64(ctx
, fp1
, ft
);
11852 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11853 gen_store_fpr64(ctx
, fp1
, fd
);
11854 tcg_temp_free_i64(fp1
);
11855 tcg_temp_free_i64(fp0
);
11858 check_cp1_64bitmode(ctx
);
11860 TCGv_i64 fp0
= tcg_temp_new_i64();
11861 TCGv_i64 fp1
= tcg_temp_new_i64();
11863 gen_load_fpr64(ctx
, fp0
, fs
);
11864 gen_load_fpr64(ctx
, fp1
, ft
);
11865 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11866 tcg_temp_free_i64(fp1
);
11867 gen_store_fpr64(ctx
, fp0
, fd
);
11868 tcg_temp_free_i64(fp0
);
11872 case OPC_MINA_D
: /* OPC_RECIP1_D */
11873 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11875 TCGv_i64 fp0
= tcg_temp_new_i64();
11876 TCGv_i64 fp1
= tcg_temp_new_i64();
11877 gen_load_fpr64(ctx
, fp0
, fs
);
11878 gen_load_fpr64(ctx
, fp1
, ft
);
11879 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11880 gen_store_fpr64(ctx
, fp1
, fd
);
11881 tcg_temp_free_i64(fp1
);
11882 tcg_temp_free_i64(fp0
);
11885 check_cp1_64bitmode(ctx
);
11887 TCGv_i64 fp0
= tcg_temp_new_i64();
11889 gen_load_fpr64(ctx
, fp0
, fs
);
11890 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11891 gen_store_fpr64(ctx
, fp0
, fd
);
11892 tcg_temp_free_i64(fp0
);
11896 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11897 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11899 TCGv_i64 fp0
= tcg_temp_new_i64();
11900 TCGv_i64 fp1
= tcg_temp_new_i64();
11901 gen_load_fpr64(ctx
, fp0
, fs
);
11902 gen_load_fpr64(ctx
, fp1
, ft
);
11903 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11904 gen_store_fpr64(ctx
, fp1
, fd
);
11905 tcg_temp_free_i64(fp1
);
11906 tcg_temp_free_i64(fp0
);
11909 check_cp1_64bitmode(ctx
);
11911 TCGv_i64 fp0
= tcg_temp_new_i64();
11913 gen_load_fpr64(ctx
, fp0
, fs
);
11914 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11915 gen_store_fpr64(ctx
, fp0
, fd
);
11916 tcg_temp_free_i64(fp0
);
11920 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11921 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11923 TCGv_i64 fp0
= tcg_temp_new_i64();
11924 TCGv_i64 fp1
= tcg_temp_new_i64();
11925 gen_load_fpr64(ctx
, fp0
, fs
);
11926 gen_load_fpr64(ctx
, fp1
, ft
);
11927 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11928 gen_store_fpr64(ctx
, fp1
, fd
);
11929 tcg_temp_free_i64(fp1
);
11930 tcg_temp_free_i64(fp0
);
11933 check_cp1_64bitmode(ctx
);
11935 TCGv_i64 fp0
= tcg_temp_new_i64();
11936 TCGv_i64 fp1
= tcg_temp_new_i64();
11938 gen_load_fpr64(ctx
, fp0
, fs
);
11939 gen_load_fpr64(ctx
, fp1
, ft
);
11940 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
11941 tcg_temp_free_i64(fp1
);
11942 gen_store_fpr64(ctx
, fp0
, fd
);
11943 tcg_temp_free_i64(fp0
);
11950 case OPC_CMP_UEQ_D
:
11951 case OPC_CMP_OLT_D
:
11952 case OPC_CMP_ULT_D
:
11953 case OPC_CMP_OLE_D
:
11954 case OPC_CMP_ULE_D
:
11956 case OPC_CMP_NGLE_D
:
11957 case OPC_CMP_SEQ_D
:
11958 case OPC_CMP_NGL_D
:
11960 case OPC_CMP_NGE_D
:
11962 case OPC_CMP_NGT_D
:
11963 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11964 if (ctx
->opcode
& (1 << 6)) {
11965 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
11967 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
11971 check_cp1_registers(ctx
, fs
);
11973 TCGv_i32 fp32
= tcg_temp_new_i32();
11974 TCGv_i64 fp64
= tcg_temp_new_i64();
11976 gen_load_fpr64(ctx
, fp64
, fs
);
11977 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
11978 tcg_temp_free_i64(fp64
);
11979 gen_store_fpr32(ctx
, fp32
, fd
);
11980 tcg_temp_free_i32(fp32
);
11984 check_cp1_registers(ctx
, fs
);
11986 TCGv_i32 fp32
= tcg_temp_new_i32();
11987 TCGv_i64 fp64
= tcg_temp_new_i64();
11989 gen_load_fpr64(ctx
, fp64
, fs
);
11990 if (ctx
->nan2008
) {
11991 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
11993 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
11995 tcg_temp_free_i64(fp64
);
11996 gen_store_fpr32(ctx
, fp32
, fd
);
11997 tcg_temp_free_i32(fp32
);
12001 check_cp1_64bitmode(ctx
);
12003 TCGv_i64 fp0
= tcg_temp_new_i64();
12005 gen_load_fpr64(ctx
, fp0
, fs
);
12006 if (ctx
->nan2008
) {
12007 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
12009 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
12011 gen_store_fpr64(ctx
, fp0
, fd
);
12012 tcg_temp_free_i64(fp0
);
12017 TCGv_i32 fp0
= tcg_temp_new_i32();
12019 gen_load_fpr32(ctx
, fp0
, fs
);
12020 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
12021 gen_store_fpr32(ctx
, fp0
, fd
);
12022 tcg_temp_free_i32(fp0
);
12026 check_cp1_registers(ctx
, fd
);
12028 TCGv_i32 fp32
= tcg_temp_new_i32();
12029 TCGv_i64 fp64
= tcg_temp_new_i64();
12031 gen_load_fpr32(ctx
, fp32
, fs
);
12032 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
12033 tcg_temp_free_i32(fp32
);
12034 gen_store_fpr64(ctx
, fp64
, fd
);
12035 tcg_temp_free_i64(fp64
);
12039 check_cp1_64bitmode(ctx
);
12041 TCGv_i32 fp32
= tcg_temp_new_i32();
12042 TCGv_i64 fp64
= tcg_temp_new_i64();
12044 gen_load_fpr64(ctx
, fp64
, fs
);
12045 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
12046 tcg_temp_free_i64(fp64
);
12047 gen_store_fpr32(ctx
, fp32
, fd
);
12048 tcg_temp_free_i32(fp32
);
12052 check_cp1_64bitmode(ctx
);
12054 TCGv_i64 fp0
= tcg_temp_new_i64();
12056 gen_load_fpr64(ctx
, fp0
, fs
);
12057 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
12058 gen_store_fpr64(ctx
, fp0
, fd
);
12059 tcg_temp_free_i64(fp0
);
12062 case OPC_CVT_PS_PW
:
12065 TCGv_i64 fp0
= tcg_temp_new_i64();
12067 gen_load_fpr64(ctx
, fp0
, fs
);
12068 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
12069 gen_store_fpr64(ctx
, fp0
, fd
);
12070 tcg_temp_free_i64(fp0
);
12076 TCGv_i64 fp0
= tcg_temp_new_i64();
12077 TCGv_i64 fp1
= tcg_temp_new_i64();
12079 gen_load_fpr64(ctx
, fp0
, fs
);
12080 gen_load_fpr64(ctx
, fp1
, ft
);
12081 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
12082 tcg_temp_free_i64(fp1
);
12083 gen_store_fpr64(ctx
, fp0
, fd
);
12084 tcg_temp_free_i64(fp0
);
12090 TCGv_i64 fp0
= tcg_temp_new_i64();
12091 TCGv_i64 fp1
= tcg_temp_new_i64();
12093 gen_load_fpr64(ctx
, fp0
, fs
);
12094 gen_load_fpr64(ctx
, fp1
, ft
);
12095 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
12096 tcg_temp_free_i64(fp1
);
12097 gen_store_fpr64(ctx
, fp0
, fd
);
12098 tcg_temp_free_i64(fp0
);
12104 TCGv_i64 fp0
= tcg_temp_new_i64();
12105 TCGv_i64 fp1
= tcg_temp_new_i64();
12107 gen_load_fpr64(ctx
, fp0
, fs
);
12108 gen_load_fpr64(ctx
, fp1
, ft
);
12109 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
12110 tcg_temp_free_i64(fp1
);
12111 gen_store_fpr64(ctx
, fp0
, fd
);
12112 tcg_temp_free_i64(fp0
);
12118 TCGv_i64 fp0
= tcg_temp_new_i64();
12120 gen_load_fpr64(ctx
, fp0
, fs
);
12121 gen_helper_float_abs_ps(fp0
, fp0
);
12122 gen_store_fpr64(ctx
, fp0
, fd
);
12123 tcg_temp_free_i64(fp0
);
12129 TCGv_i64 fp0
= tcg_temp_new_i64();
12131 gen_load_fpr64(ctx
, fp0
, fs
);
12132 gen_store_fpr64(ctx
, fp0
, fd
);
12133 tcg_temp_free_i64(fp0
);
12139 TCGv_i64 fp0
= tcg_temp_new_i64();
12141 gen_load_fpr64(ctx
, fp0
, fs
);
12142 gen_helper_float_chs_ps(fp0
, fp0
);
12143 gen_store_fpr64(ctx
, fp0
, fd
);
12144 tcg_temp_free_i64(fp0
);
12149 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12154 TCGLabel
*l1
= gen_new_label();
12158 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12160 fp0
= tcg_temp_new_i64();
12161 gen_load_fpr64(ctx
, fp0
, fs
);
12162 gen_store_fpr64(ctx
, fp0
, fd
);
12163 tcg_temp_free_i64(fp0
);
12170 TCGLabel
*l1
= gen_new_label();
12174 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12175 fp0
= tcg_temp_new_i64();
12176 gen_load_fpr64(ctx
, fp0
, fs
);
12177 gen_store_fpr64(ctx
, fp0
, fd
);
12178 tcg_temp_free_i64(fp0
);
12186 TCGv_i64 fp0
= tcg_temp_new_i64();
12187 TCGv_i64 fp1
= tcg_temp_new_i64();
12189 gen_load_fpr64(ctx
, fp0
, ft
);
12190 gen_load_fpr64(ctx
, fp1
, fs
);
12191 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12192 tcg_temp_free_i64(fp1
);
12193 gen_store_fpr64(ctx
, fp0
, fd
);
12194 tcg_temp_free_i64(fp0
);
12200 TCGv_i64 fp0
= tcg_temp_new_i64();
12201 TCGv_i64 fp1
= tcg_temp_new_i64();
12203 gen_load_fpr64(ctx
, fp0
, ft
);
12204 gen_load_fpr64(ctx
, fp1
, fs
);
12205 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12206 tcg_temp_free_i64(fp1
);
12207 gen_store_fpr64(ctx
, fp0
, fd
);
12208 tcg_temp_free_i64(fp0
);
12211 case OPC_RECIP2_PS
:
12214 TCGv_i64 fp0
= tcg_temp_new_i64();
12215 TCGv_i64 fp1
= tcg_temp_new_i64();
12217 gen_load_fpr64(ctx
, fp0
, fs
);
12218 gen_load_fpr64(ctx
, fp1
, ft
);
12219 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12220 tcg_temp_free_i64(fp1
);
12221 gen_store_fpr64(ctx
, fp0
, fd
);
12222 tcg_temp_free_i64(fp0
);
12225 case OPC_RECIP1_PS
:
12228 TCGv_i64 fp0
= tcg_temp_new_i64();
12230 gen_load_fpr64(ctx
, fp0
, fs
);
12231 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12232 gen_store_fpr64(ctx
, fp0
, fd
);
12233 tcg_temp_free_i64(fp0
);
12236 case OPC_RSQRT1_PS
:
12239 TCGv_i64 fp0
= tcg_temp_new_i64();
12241 gen_load_fpr64(ctx
, fp0
, fs
);
12242 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12243 gen_store_fpr64(ctx
, fp0
, fd
);
12244 tcg_temp_free_i64(fp0
);
12247 case OPC_RSQRT2_PS
:
12250 TCGv_i64 fp0
= tcg_temp_new_i64();
12251 TCGv_i64 fp1
= tcg_temp_new_i64();
12253 gen_load_fpr64(ctx
, fp0
, fs
);
12254 gen_load_fpr64(ctx
, fp1
, ft
);
12255 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12256 tcg_temp_free_i64(fp1
);
12257 gen_store_fpr64(ctx
, fp0
, fd
);
12258 tcg_temp_free_i64(fp0
);
12262 check_cp1_64bitmode(ctx
);
12264 TCGv_i32 fp0
= tcg_temp_new_i32();
12266 gen_load_fpr32h(ctx
, fp0
, fs
);
12267 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12268 gen_store_fpr32(ctx
, fp0
, fd
);
12269 tcg_temp_free_i32(fp0
);
12272 case OPC_CVT_PW_PS
:
12275 TCGv_i64 fp0
= tcg_temp_new_i64();
12277 gen_load_fpr64(ctx
, fp0
, fs
);
12278 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12279 gen_store_fpr64(ctx
, fp0
, fd
);
12280 tcg_temp_free_i64(fp0
);
12284 check_cp1_64bitmode(ctx
);
12286 TCGv_i32 fp0
= tcg_temp_new_i32();
12288 gen_load_fpr32(ctx
, fp0
, fs
);
12289 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12290 gen_store_fpr32(ctx
, fp0
, fd
);
12291 tcg_temp_free_i32(fp0
);
12297 TCGv_i32 fp0
= tcg_temp_new_i32();
12298 TCGv_i32 fp1
= tcg_temp_new_i32();
12300 gen_load_fpr32(ctx
, fp0
, fs
);
12301 gen_load_fpr32(ctx
, fp1
, ft
);
12302 gen_store_fpr32h(ctx
, fp0
, fd
);
12303 gen_store_fpr32(ctx
, fp1
, fd
);
12304 tcg_temp_free_i32(fp0
);
12305 tcg_temp_free_i32(fp1
);
12311 TCGv_i32 fp0
= tcg_temp_new_i32();
12312 TCGv_i32 fp1
= tcg_temp_new_i32();
12314 gen_load_fpr32(ctx
, fp0
, fs
);
12315 gen_load_fpr32h(ctx
, fp1
, ft
);
12316 gen_store_fpr32(ctx
, fp1
, fd
);
12317 gen_store_fpr32h(ctx
, fp0
, fd
);
12318 tcg_temp_free_i32(fp0
);
12319 tcg_temp_free_i32(fp1
);
12325 TCGv_i32 fp0
= tcg_temp_new_i32();
12326 TCGv_i32 fp1
= tcg_temp_new_i32();
12328 gen_load_fpr32h(ctx
, fp0
, fs
);
12329 gen_load_fpr32(ctx
, fp1
, ft
);
12330 gen_store_fpr32(ctx
, fp1
, fd
);
12331 gen_store_fpr32h(ctx
, fp0
, fd
);
12332 tcg_temp_free_i32(fp0
);
12333 tcg_temp_free_i32(fp1
);
12339 TCGv_i32 fp0
= tcg_temp_new_i32();
12340 TCGv_i32 fp1
= tcg_temp_new_i32();
12342 gen_load_fpr32h(ctx
, fp0
, fs
);
12343 gen_load_fpr32h(ctx
, fp1
, ft
);
12344 gen_store_fpr32(ctx
, fp1
, fd
);
12345 gen_store_fpr32h(ctx
, fp0
, fd
);
12346 tcg_temp_free_i32(fp0
);
12347 tcg_temp_free_i32(fp1
);
12351 case OPC_CMP_UN_PS
:
12352 case OPC_CMP_EQ_PS
:
12353 case OPC_CMP_UEQ_PS
:
12354 case OPC_CMP_OLT_PS
:
12355 case OPC_CMP_ULT_PS
:
12356 case OPC_CMP_OLE_PS
:
12357 case OPC_CMP_ULE_PS
:
12358 case OPC_CMP_SF_PS
:
12359 case OPC_CMP_NGLE_PS
:
12360 case OPC_CMP_SEQ_PS
:
12361 case OPC_CMP_NGL_PS
:
12362 case OPC_CMP_LT_PS
:
12363 case OPC_CMP_NGE_PS
:
12364 case OPC_CMP_LE_PS
:
12365 case OPC_CMP_NGT_PS
:
12366 if (ctx
->opcode
& (1 << 6)) {
12367 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12369 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12373 MIPS_INVAL("farith");
12374 generate_exception_end(ctx
, EXCP_RI
);
12379 /* Coprocessor 3 (FPU) */
12380 static void gen_flt3_ldst(DisasContext
*ctx
, uint32_t opc
,
12381 int fd
, int fs
, int base
, int index
)
12383 TCGv t0
= tcg_temp_new();
12386 gen_load_gpr(t0
, index
);
12387 } else if (index
== 0) {
12388 gen_load_gpr(t0
, base
);
12390 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12392 /* Don't do NOP if destination is zero: we must perform the actual
12398 TCGv_i32 fp0
= tcg_temp_new_i32();
12400 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12401 tcg_gen_trunc_tl_i32(fp0
, t0
);
12402 gen_store_fpr32(ctx
, fp0
, fd
);
12403 tcg_temp_free_i32(fp0
);
12408 check_cp1_registers(ctx
, fd
);
12410 TCGv_i64 fp0
= tcg_temp_new_i64();
12411 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12412 gen_store_fpr64(ctx
, fp0
, fd
);
12413 tcg_temp_free_i64(fp0
);
12417 check_cp1_64bitmode(ctx
);
12418 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12420 TCGv_i64 fp0
= tcg_temp_new_i64();
12422 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12423 gen_store_fpr64(ctx
, fp0
, fd
);
12424 tcg_temp_free_i64(fp0
);
12430 TCGv_i32 fp0
= tcg_temp_new_i32();
12431 gen_load_fpr32(ctx
, fp0
, fs
);
12432 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12433 tcg_temp_free_i32(fp0
);
12438 check_cp1_registers(ctx
, fs
);
12440 TCGv_i64 fp0
= tcg_temp_new_i64();
12441 gen_load_fpr64(ctx
, fp0
, fs
);
12442 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12443 tcg_temp_free_i64(fp0
);
12447 check_cp1_64bitmode(ctx
);
12448 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12450 TCGv_i64 fp0
= tcg_temp_new_i64();
12451 gen_load_fpr64(ctx
, fp0
, fs
);
12452 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12453 tcg_temp_free_i64(fp0
);
12460 static void gen_flt3_arith(DisasContext
*ctx
, uint32_t opc
,
12461 int fd
, int fr
, int fs
, int ft
)
12467 TCGv t0
= tcg_temp_local_new();
12468 TCGv_i32 fp
= tcg_temp_new_i32();
12469 TCGv_i32 fph
= tcg_temp_new_i32();
12470 TCGLabel
*l1
= gen_new_label();
12471 TCGLabel
*l2
= gen_new_label();
12473 gen_load_gpr(t0
, fr
);
12474 tcg_gen_andi_tl(t0
, t0
, 0x7);
12476 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12477 gen_load_fpr32(ctx
, fp
, fs
);
12478 gen_load_fpr32h(ctx
, fph
, fs
);
12479 gen_store_fpr32(ctx
, fp
, fd
);
12480 gen_store_fpr32h(ctx
, fph
, fd
);
12483 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12485 #ifdef TARGET_WORDS_BIGENDIAN
12486 gen_load_fpr32(ctx
, fp
, fs
);
12487 gen_load_fpr32h(ctx
, fph
, ft
);
12488 gen_store_fpr32h(ctx
, fp
, fd
);
12489 gen_store_fpr32(ctx
, fph
, fd
);
12491 gen_load_fpr32h(ctx
, fph
, fs
);
12492 gen_load_fpr32(ctx
, fp
, ft
);
12493 gen_store_fpr32(ctx
, fph
, fd
);
12494 gen_store_fpr32h(ctx
, fp
, fd
);
12497 tcg_temp_free_i32(fp
);
12498 tcg_temp_free_i32(fph
);
12504 TCGv_i32 fp0
= tcg_temp_new_i32();
12505 TCGv_i32 fp1
= tcg_temp_new_i32();
12506 TCGv_i32 fp2
= tcg_temp_new_i32();
12508 gen_load_fpr32(ctx
, fp0
, fs
);
12509 gen_load_fpr32(ctx
, fp1
, ft
);
12510 gen_load_fpr32(ctx
, fp2
, fr
);
12511 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12512 tcg_temp_free_i32(fp0
);
12513 tcg_temp_free_i32(fp1
);
12514 gen_store_fpr32(ctx
, fp2
, fd
);
12515 tcg_temp_free_i32(fp2
);
12520 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12522 TCGv_i64 fp0
= tcg_temp_new_i64();
12523 TCGv_i64 fp1
= tcg_temp_new_i64();
12524 TCGv_i64 fp2
= tcg_temp_new_i64();
12526 gen_load_fpr64(ctx
, fp0
, fs
);
12527 gen_load_fpr64(ctx
, fp1
, ft
);
12528 gen_load_fpr64(ctx
, fp2
, fr
);
12529 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12530 tcg_temp_free_i64(fp0
);
12531 tcg_temp_free_i64(fp1
);
12532 gen_store_fpr64(ctx
, fp2
, fd
);
12533 tcg_temp_free_i64(fp2
);
12539 TCGv_i64 fp0
= tcg_temp_new_i64();
12540 TCGv_i64 fp1
= tcg_temp_new_i64();
12541 TCGv_i64 fp2
= tcg_temp_new_i64();
12543 gen_load_fpr64(ctx
, fp0
, fs
);
12544 gen_load_fpr64(ctx
, fp1
, ft
);
12545 gen_load_fpr64(ctx
, fp2
, fr
);
12546 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12547 tcg_temp_free_i64(fp0
);
12548 tcg_temp_free_i64(fp1
);
12549 gen_store_fpr64(ctx
, fp2
, fd
);
12550 tcg_temp_free_i64(fp2
);
12556 TCGv_i32 fp0
= tcg_temp_new_i32();
12557 TCGv_i32 fp1
= tcg_temp_new_i32();
12558 TCGv_i32 fp2
= tcg_temp_new_i32();
12560 gen_load_fpr32(ctx
, fp0
, fs
);
12561 gen_load_fpr32(ctx
, fp1
, ft
);
12562 gen_load_fpr32(ctx
, fp2
, fr
);
12563 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12564 tcg_temp_free_i32(fp0
);
12565 tcg_temp_free_i32(fp1
);
12566 gen_store_fpr32(ctx
, fp2
, fd
);
12567 tcg_temp_free_i32(fp2
);
12572 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12574 TCGv_i64 fp0
= tcg_temp_new_i64();
12575 TCGv_i64 fp1
= tcg_temp_new_i64();
12576 TCGv_i64 fp2
= tcg_temp_new_i64();
12578 gen_load_fpr64(ctx
, fp0
, fs
);
12579 gen_load_fpr64(ctx
, fp1
, ft
);
12580 gen_load_fpr64(ctx
, fp2
, fr
);
12581 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12582 tcg_temp_free_i64(fp0
);
12583 tcg_temp_free_i64(fp1
);
12584 gen_store_fpr64(ctx
, fp2
, fd
);
12585 tcg_temp_free_i64(fp2
);
12591 TCGv_i64 fp0
= tcg_temp_new_i64();
12592 TCGv_i64 fp1
= tcg_temp_new_i64();
12593 TCGv_i64 fp2
= tcg_temp_new_i64();
12595 gen_load_fpr64(ctx
, fp0
, fs
);
12596 gen_load_fpr64(ctx
, fp1
, ft
);
12597 gen_load_fpr64(ctx
, fp2
, fr
);
12598 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12599 tcg_temp_free_i64(fp0
);
12600 tcg_temp_free_i64(fp1
);
12601 gen_store_fpr64(ctx
, fp2
, fd
);
12602 tcg_temp_free_i64(fp2
);
12608 TCGv_i32 fp0
= tcg_temp_new_i32();
12609 TCGv_i32 fp1
= tcg_temp_new_i32();
12610 TCGv_i32 fp2
= tcg_temp_new_i32();
12612 gen_load_fpr32(ctx
, fp0
, fs
);
12613 gen_load_fpr32(ctx
, fp1
, ft
);
12614 gen_load_fpr32(ctx
, fp2
, fr
);
12615 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12616 tcg_temp_free_i32(fp0
);
12617 tcg_temp_free_i32(fp1
);
12618 gen_store_fpr32(ctx
, fp2
, fd
);
12619 tcg_temp_free_i32(fp2
);
12624 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12626 TCGv_i64 fp0
= tcg_temp_new_i64();
12627 TCGv_i64 fp1
= tcg_temp_new_i64();
12628 TCGv_i64 fp2
= tcg_temp_new_i64();
12630 gen_load_fpr64(ctx
, fp0
, fs
);
12631 gen_load_fpr64(ctx
, fp1
, ft
);
12632 gen_load_fpr64(ctx
, fp2
, fr
);
12633 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12634 tcg_temp_free_i64(fp0
);
12635 tcg_temp_free_i64(fp1
);
12636 gen_store_fpr64(ctx
, fp2
, fd
);
12637 tcg_temp_free_i64(fp2
);
12643 TCGv_i64 fp0
= tcg_temp_new_i64();
12644 TCGv_i64 fp1
= tcg_temp_new_i64();
12645 TCGv_i64 fp2
= tcg_temp_new_i64();
12647 gen_load_fpr64(ctx
, fp0
, fs
);
12648 gen_load_fpr64(ctx
, fp1
, ft
);
12649 gen_load_fpr64(ctx
, fp2
, fr
);
12650 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12651 tcg_temp_free_i64(fp0
);
12652 tcg_temp_free_i64(fp1
);
12653 gen_store_fpr64(ctx
, fp2
, fd
);
12654 tcg_temp_free_i64(fp2
);
12660 TCGv_i32 fp0
= tcg_temp_new_i32();
12661 TCGv_i32 fp1
= tcg_temp_new_i32();
12662 TCGv_i32 fp2
= tcg_temp_new_i32();
12664 gen_load_fpr32(ctx
, fp0
, fs
);
12665 gen_load_fpr32(ctx
, fp1
, ft
);
12666 gen_load_fpr32(ctx
, fp2
, fr
);
12667 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12668 tcg_temp_free_i32(fp0
);
12669 tcg_temp_free_i32(fp1
);
12670 gen_store_fpr32(ctx
, fp2
, fd
);
12671 tcg_temp_free_i32(fp2
);
12676 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12678 TCGv_i64 fp0
= tcg_temp_new_i64();
12679 TCGv_i64 fp1
= tcg_temp_new_i64();
12680 TCGv_i64 fp2
= tcg_temp_new_i64();
12682 gen_load_fpr64(ctx
, fp0
, fs
);
12683 gen_load_fpr64(ctx
, fp1
, ft
);
12684 gen_load_fpr64(ctx
, fp2
, fr
);
12685 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12686 tcg_temp_free_i64(fp0
);
12687 tcg_temp_free_i64(fp1
);
12688 gen_store_fpr64(ctx
, fp2
, fd
);
12689 tcg_temp_free_i64(fp2
);
12695 TCGv_i64 fp0
= tcg_temp_new_i64();
12696 TCGv_i64 fp1
= tcg_temp_new_i64();
12697 TCGv_i64 fp2
= tcg_temp_new_i64();
12699 gen_load_fpr64(ctx
, fp0
, fs
);
12700 gen_load_fpr64(ctx
, fp1
, ft
);
12701 gen_load_fpr64(ctx
, fp2
, fr
);
12702 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12703 tcg_temp_free_i64(fp0
);
12704 tcg_temp_free_i64(fp1
);
12705 gen_store_fpr64(ctx
, fp2
, fd
);
12706 tcg_temp_free_i64(fp2
);
12710 MIPS_INVAL("flt3_arith");
12711 generate_exception_end(ctx
, EXCP_RI
);
12716 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12720 #if !defined(CONFIG_USER_ONLY)
12721 /* The Linux kernel will emulate rdhwr if it's not supported natively.
12722 Therefore only check the ISA in system mode. */
12723 check_insn(ctx
, ISA_MIPS32R2
);
12725 t0
= tcg_temp_new();
12729 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12730 gen_store_gpr(t0
, rt
);
12733 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12734 gen_store_gpr(t0
, rt
);
12737 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12740 gen_helper_rdhwr_cc(t0
, cpu_env
);
12741 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12744 gen_store_gpr(t0
, rt
);
12745 /* Break the TB to be able to take timer interrupts immediately
12746 after reading count. DISAS_STOP isn't sufficient, we need to ensure
12747 we break completely out of translated code. */
12748 gen_save_pc(ctx
->base
.pc_next
+ 4);
12749 ctx
->base
.is_jmp
= DISAS_EXIT
;
12752 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12753 gen_store_gpr(t0
, rt
);
12756 check_insn(ctx
, ISA_MIPS32R6
);
12758 /* Performance counter registers are not implemented other than
12759 * control register 0.
12761 generate_exception(ctx
, EXCP_RI
);
12763 gen_helper_rdhwr_performance(t0
, cpu_env
);
12764 gen_store_gpr(t0
, rt
);
12767 check_insn(ctx
, ISA_MIPS32R6
);
12768 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12769 gen_store_gpr(t0
, rt
);
12772 #if defined(CONFIG_USER_ONLY)
12773 tcg_gen_ld_tl(t0
, cpu_env
,
12774 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12775 gen_store_gpr(t0
, rt
);
12778 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12779 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12780 tcg_gen_ld_tl(t0
, cpu_env
,
12781 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12782 gen_store_gpr(t0
, rt
);
12784 generate_exception_end(ctx
, EXCP_RI
);
12788 default: /* Invalid */
12789 MIPS_INVAL("rdhwr");
12790 generate_exception_end(ctx
, EXCP_RI
);
12796 static inline void clear_branch_hflags(DisasContext
*ctx
)
12798 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12799 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12800 save_cpu_state(ctx
, 0);
12802 /* it is not safe to save ctx->hflags as hflags may be changed
12803 in execution time by the instruction in delay / forbidden slot. */
12804 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12808 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12810 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12811 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12812 /* Branches completion */
12813 clear_branch_hflags(ctx
);
12814 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12815 /* FIXME: Need to clear can_do_io. */
12816 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12817 case MIPS_HFLAG_FBNSLOT
:
12818 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12821 /* unconditional branch */
12822 if (proc_hflags
& MIPS_HFLAG_BX
) {
12823 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12825 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12827 case MIPS_HFLAG_BL
:
12828 /* blikely taken case */
12829 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12831 case MIPS_HFLAG_BC
:
12832 /* Conditional branch */
12834 TCGLabel
*l1
= gen_new_label();
12836 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12837 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12839 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12842 case MIPS_HFLAG_BR
:
12843 /* unconditional branch to register */
12844 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12845 TCGv t0
= tcg_temp_new();
12846 TCGv_i32 t1
= tcg_temp_new_i32();
12848 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12849 tcg_gen_trunc_tl_i32(t1
, t0
);
12851 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12852 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12853 tcg_gen_or_i32(hflags
, hflags
, t1
);
12854 tcg_temp_free_i32(t1
);
12856 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12858 tcg_gen_mov_tl(cpu_PC
, btarget
);
12860 if (ctx
->base
.singlestep_enabled
) {
12861 save_cpu_state(ctx
, 0);
12862 gen_helper_raise_exception_debug(cpu_env
);
12864 tcg_gen_lookup_and_goto_ptr();
12867 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12873 /* Compact Branches */
12874 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12875 int rs
, int rt
, int32_t offset
)
12877 int bcond_compute
= 0;
12878 TCGv t0
= tcg_temp_new();
12879 TCGv t1
= tcg_temp_new();
12880 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12882 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12883 #ifdef MIPS_DEBUG_DISAS
12884 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12885 "\n", ctx
->base
.pc_next
);
12887 generate_exception_end(ctx
, EXCP_RI
);
12891 /* Load needed operands and calculate btarget */
12893 /* compact branch */
12894 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12895 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12896 gen_load_gpr(t0
, rs
);
12897 gen_load_gpr(t1
, rt
);
12899 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12900 if (rs
<= rt
&& rs
== 0) {
12901 /* OPC_BEQZALC, OPC_BNEZALC */
12902 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12905 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12906 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12907 gen_load_gpr(t0
, rs
);
12908 gen_load_gpr(t1
, rt
);
12910 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12912 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12913 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12914 if (rs
== 0 || rs
== rt
) {
12915 /* OPC_BLEZALC, OPC_BGEZALC */
12916 /* OPC_BGTZALC, OPC_BLTZALC */
12917 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12919 gen_load_gpr(t0
, rs
);
12920 gen_load_gpr(t1
, rt
);
12922 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12926 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12931 /* OPC_BEQZC, OPC_BNEZC */
12932 gen_load_gpr(t0
, rs
);
12934 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12936 /* OPC_JIC, OPC_JIALC */
12937 TCGv tbase
= tcg_temp_new();
12938 TCGv toffset
= tcg_temp_new();
12940 gen_load_gpr(tbase
, rt
);
12941 tcg_gen_movi_tl(toffset
, offset
);
12942 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
12943 tcg_temp_free(tbase
);
12944 tcg_temp_free(toffset
);
12948 MIPS_INVAL("Compact branch/jump");
12949 generate_exception_end(ctx
, EXCP_RI
);
12953 if (bcond_compute
== 0) {
12954 /* Uncoditional compact branch */
12957 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12960 ctx
->hflags
|= MIPS_HFLAG_BR
;
12963 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12966 ctx
->hflags
|= MIPS_HFLAG_B
;
12969 MIPS_INVAL("Compact branch/jump");
12970 generate_exception_end(ctx
, EXCP_RI
);
12974 /* Generating branch here as compact branches don't have delay slot */
12975 gen_branch(ctx
, 4);
12977 /* Conditional compact branch */
12978 TCGLabel
*fs
= gen_new_label();
12979 save_cpu_state(ctx
, 0);
12982 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12983 if (rs
== 0 && rt
!= 0) {
12985 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
12986 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
12988 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
12991 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
12994 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12995 if (rs
== 0 && rt
!= 0) {
12997 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
12998 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13000 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13003 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
13006 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
13007 if (rs
== 0 && rt
!= 0) {
13009 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13010 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13012 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13015 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
13018 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
13019 if (rs
== 0 && rt
!= 0) {
13021 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13022 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13024 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13027 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
13030 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
13031 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
13033 /* OPC_BOVC, OPC_BNVC */
13034 TCGv t2
= tcg_temp_new();
13035 TCGv t3
= tcg_temp_new();
13036 TCGv t4
= tcg_temp_new();
13037 TCGv input_overflow
= tcg_temp_new();
13039 gen_load_gpr(t0
, rs
);
13040 gen_load_gpr(t1
, rt
);
13041 tcg_gen_ext32s_tl(t2
, t0
);
13042 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
13043 tcg_gen_ext32s_tl(t3
, t1
);
13044 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
13045 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
13047 tcg_gen_add_tl(t4
, t2
, t3
);
13048 tcg_gen_ext32s_tl(t4
, t4
);
13049 tcg_gen_xor_tl(t2
, t2
, t3
);
13050 tcg_gen_xor_tl(t3
, t4
, t3
);
13051 tcg_gen_andc_tl(t2
, t3
, t2
);
13052 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
13053 tcg_gen_or_tl(t4
, t4
, input_overflow
);
13054 if (opc
== OPC_BOVC
) {
13056 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
13059 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
13061 tcg_temp_free(input_overflow
);
13065 } else if (rs
< rt
&& rs
== 0) {
13066 /* OPC_BEQZALC, OPC_BNEZALC */
13067 if (opc
== OPC_BEQZALC
) {
13069 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
13072 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
13075 /* OPC_BEQC, OPC_BNEC */
13076 if (opc
== OPC_BEQC
) {
13078 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
13081 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
13086 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
13089 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
13092 MIPS_INVAL("Compact conditional branch/jump");
13093 generate_exception_end(ctx
, EXCP_RI
);
13097 /* Generating branch here as compact branches don't have delay slot */
13098 gen_goto_tb(ctx
, 1, ctx
->btarget
);
13101 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
13109 /* ISA extensions (ASEs) */
13110 /* MIPS16 extension to MIPS32 */
13112 /* MIPS16 major opcodes */
13114 M16_OPC_ADDIUSP
= 0x00,
13115 M16_OPC_ADDIUPC
= 0x01,
13117 M16_OPC_JAL
= 0x03,
13118 M16_OPC_BEQZ
= 0x04,
13119 M16_OPC_BNEQZ
= 0x05,
13120 M16_OPC_SHIFT
= 0x06,
13122 M16_OPC_RRIA
= 0x08,
13123 M16_OPC_ADDIU8
= 0x09,
13124 M16_OPC_SLTI
= 0x0a,
13125 M16_OPC_SLTIU
= 0x0b,
13128 M16_OPC_CMPI
= 0x0e,
13132 M16_OPC_LWSP
= 0x12,
13134 M16_OPC_LBU
= 0x14,
13135 M16_OPC_LHU
= 0x15,
13136 M16_OPC_LWPC
= 0x16,
13137 M16_OPC_LWU
= 0x17,
13140 M16_OPC_SWSP
= 0x1a,
13142 M16_OPC_RRR
= 0x1c,
13144 M16_OPC_EXTEND
= 0x1e,
13148 /* I8 funct field */
13167 /* RR funct field */
13201 /* I64 funct field */
13209 I64_DADDIUPC
= 0x6,
13213 /* RR ry field for CNVT */
13215 RR_RY_CNVT_ZEB
= 0x0,
13216 RR_RY_CNVT_ZEH
= 0x1,
13217 RR_RY_CNVT_ZEW
= 0x2,
13218 RR_RY_CNVT_SEB
= 0x4,
13219 RR_RY_CNVT_SEH
= 0x5,
13220 RR_RY_CNVT_SEW
= 0x6,
13223 static int xlat(int r
)
13225 static int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
13230 static void gen_mips16_save(DisasContext
*ctx
,
13231 int xsregs
, int aregs
,
13232 int do_ra
, int do_s0
, int do_s1
,
13235 TCGv t0
= tcg_temp_new();
13236 TCGv t1
= tcg_temp_new();
13237 TCGv t2
= tcg_temp_new();
13267 generate_exception_end(ctx
, EXCP_RI
);
13273 gen_base_offset_addr(ctx
, t0
, 29, 12);
13274 gen_load_gpr(t1
, 7);
13275 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13278 gen_base_offset_addr(ctx
, t0
, 29, 8);
13279 gen_load_gpr(t1
, 6);
13280 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13283 gen_base_offset_addr(ctx
, t0
, 29, 4);
13284 gen_load_gpr(t1
, 5);
13285 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13288 gen_base_offset_addr(ctx
, t0
, 29, 0);
13289 gen_load_gpr(t1
, 4);
13290 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13293 gen_load_gpr(t0
, 29);
13295 #define DECR_AND_STORE(reg) do { \
13296 tcg_gen_movi_tl(t2, -4); \
13297 gen_op_addr_add(ctx, t0, t0, t2); \
13298 gen_load_gpr(t1, reg); \
13299 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13303 DECR_AND_STORE(31);
13308 DECR_AND_STORE(30);
13311 DECR_AND_STORE(23);
13314 DECR_AND_STORE(22);
13317 DECR_AND_STORE(21);
13320 DECR_AND_STORE(20);
13323 DECR_AND_STORE(19);
13326 DECR_AND_STORE(18);
13330 DECR_AND_STORE(17);
13333 DECR_AND_STORE(16);
13363 generate_exception_end(ctx
, EXCP_RI
);
13379 #undef DECR_AND_STORE
13381 tcg_gen_movi_tl(t2
, -framesize
);
13382 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13388 static void gen_mips16_restore(DisasContext
*ctx
,
13389 int xsregs
, int aregs
,
13390 int do_ra
, int do_s0
, int do_s1
,
13394 TCGv t0
= tcg_temp_new();
13395 TCGv t1
= tcg_temp_new();
13396 TCGv t2
= tcg_temp_new();
13398 tcg_gen_movi_tl(t2
, framesize
);
13399 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13401 #define DECR_AND_LOAD(reg) do { \
13402 tcg_gen_movi_tl(t2, -4); \
13403 gen_op_addr_add(ctx, t0, t0, t2); \
13404 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13405 gen_store_gpr(t1, reg); \
13469 generate_exception_end(ctx
, EXCP_RI
);
13485 #undef DECR_AND_LOAD
13487 tcg_gen_movi_tl(t2
, framesize
);
13488 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13494 static void gen_addiupc(DisasContext
*ctx
, int rx
, int imm
,
13495 int is_64_bit
, int extended
)
13499 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13500 generate_exception_end(ctx
, EXCP_RI
);
13504 t0
= tcg_temp_new();
13506 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13507 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13509 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13515 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13518 TCGv_i32 t0
= tcg_const_i32(op
);
13519 TCGv t1
= tcg_temp_new();
13520 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13521 gen_helper_cache(cpu_env
, t1
, t0
);
13524 #if defined(TARGET_MIPS64)
13525 static void decode_i64_mips16(DisasContext
*ctx
,
13526 int ry
, int funct
, int16_t offset
,
13531 check_insn(ctx
, ISA_MIPS3
);
13532 check_mips_64(ctx
);
13533 offset
= extended
? offset
: offset
<< 3;
13534 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13537 check_insn(ctx
, ISA_MIPS3
);
13538 check_mips_64(ctx
);
13539 offset
= extended
? offset
: offset
<< 3;
13540 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13543 check_insn(ctx
, ISA_MIPS3
);
13544 check_mips_64(ctx
);
13545 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13546 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13549 check_insn(ctx
, ISA_MIPS3
);
13550 check_mips_64(ctx
);
13551 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13552 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13555 check_insn(ctx
, ISA_MIPS3
);
13556 check_mips_64(ctx
);
13557 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13558 generate_exception_end(ctx
, EXCP_RI
);
13560 offset
= extended
? offset
: offset
<< 3;
13561 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13565 check_insn(ctx
, ISA_MIPS3
);
13566 check_mips_64(ctx
);
13567 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13568 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13571 check_insn(ctx
, ISA_MIPS3
);
13572 check_mips_64(ctx
);
13573 offset
= extended
? offset
: offset
<< 2;
13574 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13577 check_insn(ctx
, ISA_MIPS3
);
13578 check_mips_64(ctx
);
13579 offset
= extended
? offset
: offset
<< 2;
13580 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13586 static int decode_extended_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13588 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13589 int op
, rx
, ry
, funct
, sa
;
13590 int16_t imm
, offset
;
13592 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13593 op
= (ctx
->opcode
>> 11) & 0x1f;
13594 sa
= (ctx
->opcode
>> 22) & 0x1f;
13595 funct
= (ctx
->opcode
>> 8) & 0x7;
13596 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13597 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13598 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13599 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13600 | (ctx
->opcode
& 0x1f));
13602 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
13605 case M16_OPC_ADDIUSP
:
13606 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13608 case M16_OPC_ADDIUPC
:
13609 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13612 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13613 /* No delay slot, so just process as a normal instruction */
13616 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13617 /* No delay slot, so just process as a normal instruction */
13619 case M16_OPC_BNEQZ
:
13620 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13621 /* No delay slot, so just process as a normal instruction */
13623 case M16_OPC_SHIFT
:
13624 switch (ctx
->opcode
& 0x3) {
13626 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13629 #if defined(TARGET_MIPS64)
13630 check_mips_64(ctx
);
13631 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13633 generate_exception_end(ctx
, EXCP_RI
);
13637 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13640 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13644 #if defined(TARGET_MIPS64)
13646 check_insn(ctx
, ISA_MIPS3
);
13647 check_mips_64(ctx
);
13648 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13652 imm
= ctx
->opcode
& 0xf;
13653 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13654 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13655 imm
= (int16_t) (imm
<< 1) >> 1;
13656 if ((ctx
->opcode
>> 4) & 0x1) {
13657 #if defined(TARGET_MIPS64)
13658 check_mips_64(ctx
);
13659 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13661 generate_exception_end(ctx
, EXCP_RI
);
13664 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13667 case M16_OPC_ADDIU8
:
13668 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13671 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13673 case M16_OPC_SLTIU
:
13674 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13679 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13682 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13685 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13688 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13691 check_insn(ctx
, ISA_MIPS32
);
13693 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13694 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13695 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13696 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13697 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13698 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13699 | (ctx
->opcode
& 0xf)) << 3;
13701 if (ctx
->opcode
& (1 << 7)) {
13702 gen_mips16_save(ctx
, xsregs
, aregs
,
13703 do_ra
, do_s0
, do_s1
,
13706 gen_mips16_restore(ctx
, xsregs
, aregs
,
13707 do_ra
, do_s0
, do_s1
,
13713 generate_exception_end(ctx
, EXCP_RI
);
13718 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13721 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13723 #if defined(TARGET_MIPS64)
13725 check_insn(ctx
, ISA_MIPS3
);
13726 check_mips_64(ctx
);
13727 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13731 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13734 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13737 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13740 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13743 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13746 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13749 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13751 #if defined(TARGET_MIPS64)
13753 check_insn(ctx
, ISA_MIPS3
);
13754 check_mips_64(ctx
);
13755 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13759 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13762 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13765 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13768 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13770 #if defined(TARGET_MIPS64)
13772 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13776 generate_exception_end(ctx
, EXCP_RI
);
13783 static inline bool is_uhi(int sdbbp_code
)
13785 #ifdef CONFIG_USER_ONLY
13788 return semihosting_enabled() && sdbbp_code
== 1;
#ifdef CONFIG_USER_ONLY
/*
 * Stub for user-mode builds: is_uhi() is constant false there, so the
 * compiler should dead-code away every call site; reaching this at
 * run time is a bug.
 */
static inline void gen_helper_do_semihosting(void *env)
{
    g_assert_not_reached();
}
#endif
13800 static int decode_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13804 int op
, cnvt_op
, op1
, offset
;
13808 op
= (ctx
->opcode
>> 11) & 0x1f;
13809 sa
= (ctx
->opcode
>> 2) & 0x7;
13810 sa
= sa
== 0 ? 8 : sa
;
13811 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13812 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13813 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13814 op1
= offset
= ctx
->opcode
& 0x1f;
13819 case M16_OPC_ADDIUSP
:
13821 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13823 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13826 case M16_OPC_ADDIUPC
:
13827 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13830 offset
= (ctx
->opcode
& 0x7ff) << 1;
13831 offset
= (int16_t)(offset
<< 4) >> 4;
13832 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13833 /* No delay slot, so just process as a normal instruction */
13836 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13837 offset
= (((ctx
->opcode
& 0x1f) << 21)
13838 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13840 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13841 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13845 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13846 ((int8_t)ctx
->opcode
) << 1, 0);
13847 /* No delay slot, so just process as a normal instruction */
13849 case M16_OPC_BNEQZ
:
13850 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13851 ((int8_t)ctx
->opcode
) << 1, 0);
13852 /* No delay slot, so just process as a normal instruction */
13854 case M16_OPC_SHIFT
:
13855 switch (ctx
->opcode
& 0x3) {
13857 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13860 #if defined(TARGET_MIPS64)
13861 check_insn(ctx
, ISA_MIPS3
);
13862 check_mips_64(ctx
);
13863 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13865 generate_exception_end(ctx
, EXCP_RI
);
13869 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13872 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13876 #if defined(TARGET_MIPS64)
13878 check_insn(ctx
, ISA_MIPS3
);
13879 check_mips_64(ctx
);
13880 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13885 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13887 if ((ctx
->opcode
>> 4) & 1) {
13888 #if defined(TARGET_MIPS64)
13889 check_insn(ctx
, ISA_MIPS3
);
13890 check_mips_64(ctx
);
13891 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13893 generate_exception_end(ctx
, EXCP_RI
);
13896 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13900 case M16_OPC_ADDIU8
:
13902 int16_t imm
= (int8_t) ctx
->opcode
;
13904 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13909 int16_t imm
= (uint8_t) ctx
->opcode
;
13910 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13913 case M16_OPC_SLTIU
:
13915 int16_t imm
= (uint8_t) ctx
->opcode
;
13916 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13923 funct
= (ctx
->opcode
>> 8) & 0x7;
13926 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
13927 ((int8_t)ctx
->opcode
) << 1, 0);
13930 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
13931 ((int8_t)ctx
->opcode
) << 1, 0);
13934 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
13937 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
13938 ((int8_t)ctx
->opcode
) << 3);
13941 check_insn(ctx
, ISA_MIPS32
);
13943 int do_ra
= ctx
->opcode
& (1 << 6);
13944 int do_s0
= ctx
->opcode
& (1 << 5);
13945 int do_s1
= ctx
->opcode
& (1 << 4);
13946 int framesize
= ctx
->opcode
& 0xf;
13948 if (framesize
== 0) {
13951 framesize
= framesize
<< 3;
13954 if (ctx
->opcode
& (1 << 7)) {
13955 gen_mips16_save(ctx
, 0, 0,
13956 do_ra
, do_s0
, do_s1
, framesize
);
13958 gen_mips16_restore(ctx
, 0, 0,
13959 do_ra
, do_s0
, do_s1
, framesize
);
13965 int rz
= xlat(ctx
->opcode
& 0x7);
13967 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
13968 ((ctx
->opcode
>> 5) & 0x7);
13969 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
13973 reg32
= ctx
->opcode
& 0x1f;
13974 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
13977 generate_exception_end(ctx
, EXCP_RI
);
13984 int16_t imm
= (uint8_t) ctx
->opcode
;
13986 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
13991 int16_t imm
= (uint8_t) ctx
->opcode
;
13992 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
13995 #if defined(TARGET_MIPS64)
13997 check_insn(ctx
, ISA_MIPS3
);
13998 check_mips_64(ctx
);
13999 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
14003 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
14006 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
14009 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14012 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
14015 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
14018 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
14021 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
14023 #if defined(TARGET_MIPS64)
14025 check_insn(ctx
, ISA_MIPS3
);
14026 check_mips_64(ctx
);
14027 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
14031 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
14034 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
14037 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14040 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
14044 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
14047 switch (ctx
->opcode
& 0x3) {
14049 mips32_op
= OPC_ADDU
;
14052 mips32_op
= OPC_SUBU
;
14054 #if defined(TARGET_MIPS64)
14056 mips32_op
= OPC_DADDU
;
14057 check_insn(ctx
, ISA_MIPS3
);
14058 check_mips_64(ctx
);
14061 mips32_op
= OPC_DSUBU
;
14062 check_insn(ctx
, ISA_MIPS3
);
14063 check_mips_64(ctx
);
14067 generate_exception_end(ctx
, EXCP_RI
);
14071 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
14080 int nd
= (ctx
->opcode
>> 7) & 0x1;
14081 int link
= (ctx
->opcode
>> 6) & 0x1;
14082 int ra
= (ctx
->opcode
>> 5) & 0x1;
14085 check_insn(ctx
, ISA_MIPS32
);
14094 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
14099 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
14100 gen_helper_do_semihosting(cpu_env
);
14102 /* XXX: not clear which exception should be raised
14103 * when in debug mode...
14105 check_insn(ctx
, ISA_MIPS32
);
14106 generate_exception_end(ctx
, EXCP_DBp
);
14110 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
14113 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
14116 generate_exception_end(ctx
, EXCP_BREAK
);
14119 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
14122 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
14125 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
14127 #if defined(TARGET_MIPS64)
14129 check_insn(ctx
, ISA_MIPS3
);
14130 check_mips_64(ctx
);
14131 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
14135 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
14138 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
14141 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
14144 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
14147 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
14150 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
14153 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
14156 check_insn(ctx
, ISA_MIPS32
);
14158 case RR_RY_CNVT_ZEB
:
14159 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14161 case RR_RY_CNVT_ZEH
:
14162 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14164 case RR_RY_CNVT_SEB
:
14165 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14167 case RR_RY_CNVT_SEH
:
14168 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14170 #if defined (TARGET_MIPS64)
14171 case RR_RY_CNVT_ZEW
:
14172 check_insn(ctx
, ISA_MIPS64
);
14173 check_mips_64(ctx
);
14174 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14176 case RR_RY_CNVT_SEW
:
14177 check_insn(ctx
, ISA_MIPS64
);
14178 check_mips_64(ctx
);
14179 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14183 generate_exception_end(ctx
, EXCP_RI
);
14188 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14190 #if defined(TARGET_MIPS64)
14192 check_insn(ctx
, ISA_MIPS3
);
14193 check_mips_64(ctx
);
14194 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14197 check_insn(ctx
, ISA_MIPS3
);
14198 check_mips_64(ctx
);
14199 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14202 check_insn(ctx
, ISA_MIPS3
);
14203 check_mips_64(ctx
);
14204 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14207 check_insn(ctx
, ISA_MIPS3
);
14208 check_mips_64(ctx
);
14209 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14213 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14216 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14219 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14222 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14224 #if defined(TARGET_MIPS64)
14226 check_insn(ctx
, ISA_MIPS3
);
14227 check_mips_64(ctx
);
14228 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14231 check_insn(ctx
, ISA_MIPS3
);
14232 check_mips_64(ctx
);
14233 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14236 check_insn(ctx
, ISA_MIPS3
);
14237 check_mips_64(ctx
);
14238 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14241 check_insn(ctx
, ISA_MIPS3
);
14242 check_mips_64(ctx
);
14243 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14247 generate_exception_end(ctx
, EXCP_RI
);
14251 case M16_OPC_EXTEND
:
14252 decode_extended_mips16_opc(env
, ctx
);
14255 #if defined(TARGET_MIPS64)
14257 funct
= (ctx
->opcode
>> 8) & 0x7;
14258 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14262 generate_exception_end(ctx
, EXCP_RI
);
14269 /* microMIPS extension to MIPS32/MIPS64 */
14272 * microMIPS32/microMIPS64 major opcodes
14274 * 1. MIPS Architecture for Programmers Volume II-B:
14275 * The microMIPS32 Instruction Set (Revision 3.05)
14277 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14279 * 2. MIPS Architecture For Programmers Volume II-A:
14280 * The MIPS64 Instruction Set (Revision 3.51)
14310 POOL32S
= 0x16, /* MIPS64 */
14311 DADDIU32
= 0x17, /* MIPS64 */
14340 /* 0x29 is reserved */
14353 /* 0x31 is reserved */
14366 SD32
= 0x36, /* MIPS64 */
14367 LD32
= 0x37, /* MIPS64 */
14369 /* 0x39 is reserved */
14385 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14407 /* POOL32A encoding of minor opcode field */
14410 /* These opcodes are distinguished only by bits 9..6; those bits are
14411 * what are recorded below. */
14448 /* The following can be distinguished by their lower 6 bits. */
14458 /* POOL32AXF encoding of minor opcode field extension */
14461 * 1. MIPS Architecture for Programmers Volume II-B:
14462 * The microMIPS32 Instruction Set (Revision 3.05)
14464 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14466 * 2. MIPS Architecture for Programmers VolumeIV-e:
14467 * The MIPS DSP Application-Specific Extension
14468 * to the microMIPS32 Architecture (Revision 2.34)
14470 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14485 /* begin of microMIPS32 DSP */
14487 /* bits 13..12 for 0x01 */
14493 /* bits 13..12 for 0x2a */
14499 /* bits 13..12 for 0x32 */
14503 /* end of microMIPS32 DSP */
14505 /* bits 15..12 for 0x2c */
14522 /* bits 15..12 for 0x34 */
14530 /* bits 15..12 for 0x3c */
14532 JR
= 0x0, /* alias */
14540 /* bits 15..12 for 0x05 */
14544 /* bits 15..12 for 0x0d */
14556 /* bits 15..12 for 0x15 */
14562 /* bits 15..12 for 0x1d */
14566 /* bits 15..12 for 0x2d */
14571 /* bits 15..12 for 0x35 */
14578 /* POOL32B encoding of minor opcode field (bits 15..12) */
14594 /* POOL32C encoding of minor opcode field (bits 15..12) */
14615 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14628 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14641 /* POOL32F encoding of minor opcode field (bits 5..0) */
14644 /* These are the bit 7..6 values */
14653 /* These are the bit 8..6 values */
14678 MOVZ_FMT_05
= 0x05,
14712 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14719 /* POOL32Fxf encoding of minor opcode extension field */
14757 /* POOL32I encoding of minor opcode field (bits 25..21) */
14787 /* These overlap and are distinguished by bit16 of the instruction */
14796 /* POOL16A encoding of minor opcode field */
14803 /* POOL16B encoding of minor opcode field */
14810 /* POOL16C encoding of minor opcode field */
14830 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14854 /* POOL16D encoding of minor opcode field */
14861 /* POOL16E encoding of minor opcode field */
/*
 * Decode a 3-bit microMIPS register field into the architectural
 * GPR number ($16, $17, $2..$7).
 */
static int mmreg(int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
14875 /* Used for 16-bit store instructions. */
/*
 * Decode a 3-bit microMIPS register field for 16-bit store
 * instructions; encoding 0 selects $0 instead of $16.
 */
static int mmreg2(int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
14883 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14884 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14885 #define uMIPS_RS2(op) uMIPS_RS(op)
14886 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14887 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14888 #define uMIPS_RS5(op) (op & 0x1f)
14890 /* Signed immediate */
14891 #define SIMM(op, start, width) \
14892 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14895 /* Zero-extended immediate */
14896 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14898 static void gen_addiur1sp(DisasContext
*ctx
)
14900 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14902 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14905 static void gen_addiur2(DisasContext
*ctx
)
14907 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14908 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14909 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14911 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14914 static void gen_addiusp(DisasContext
*ctx
)
14916 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14919 if (encoded
<= 1) {
14920 decoded
= 256 + encoded
;
14921 } else if (encoded
<= 255) {
14923 } else if (encoded
<= 509) {
14924 decoded
= encoded
- 512;
14926 decoded
= encoded
- 768;
14929 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
14932 static void gen_addius5(DisasContext
*ctx
)
14934 int imm
= SIMM(ctx
->opcode
, 1, 4);
14935 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14937 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
14940 static void gen_andi16(DisasContext
*ctx
)
14942 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
14943 31, 32, 63, 64, 255, 32768, 65535 };
14944 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14945 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14946 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
14948 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
14951 static void gen_ldst_multiple(DisasContext
*ctx
, uint32_t opc
, int reglist
,
14952 int base
, int16_t offset
)
14957 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
14958 generate_exception_end(ctx
, EXCP_RI
);
14962 t0
= tcg_temp_new();
14964 gen_base_offset_addr(ctx
, t0
, base
, offset
);
14966 t1
= tcg_const_tl(reglist
);
14967 t2
= tcg_const_i32(ctx
->mem_idx
);
14969 save_cpu_state(ctx
, 1);
14972 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
14975 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
14977 #ifdef TARGET_MIPS64
14979 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
14982 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
14988 tcg_temp_free_i32(t2
);
14992 static void gen_pool16c_insn(DisasContext
*ctx
)
14994 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
14995 int rs
= mmreg(ctx
->opcode
& 0x7);
14997 switch (((ctx
->opcode
) >> 4) & 0x3f) {
15002 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
15008 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
15014 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
15020 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
15027 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15028 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15030 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
15039 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15040 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15042 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
15049 int reg
= ctx
->opcode
& 0x1f;
15051 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
15057 int reg
= ctx
->opcode
& 0x1f;
15058 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
15059 /* Let normal delay slot handling in our caller take us
15060 to the branch target. */
15065 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
15066 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15070 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
15071 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15075 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
15079 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
15082 generate_exception_end(ctx
, EXCP_BREAK
);
15085 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
15086 gen_helper_do_semihosting(cpu_env
);
15088 /* XXX: not clear which exception should be raised
15089 * when in debug mode...
15091 check_insn(ctx
, ISA_MIPS32
);
15092 generate_exception_end(ctx
, EXCP_DBp
);
15095 case JRADDIUSP
+ 0:
15096 case JRADDIUSP
+ 1:
15098 int imm
= ZIMM(ctx
->opcode
, 0, 5);
15099 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15100 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15101 /* Let normal delay slot handling in our caller take us
15102 to the branch target. */
15106 generate_exception_end(ctx
, EXCP_RI
);
15111 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
15114 int rd
, rs
, re
, rt
;
15115 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
15116 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
15117 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
15118 rd
= rd_enc
[enc_dest
];
15119 re
= re_enc
[enc_dest
];
15120 rs
= rs_rt_enc
[enc_rs
];
15121 rt
= rs_rt_enc
[enc_rt
];
15123 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
15125 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
15128 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
15130 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
15134 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
15136 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
15137 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
15139 switch (ctx
->opcode
& 0xf) {
15141 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
15144 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
15148 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15149 int offset
= extract32(ctx
->opcode
, 4, 4);
15150 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
15153 case R6_JRC16
: /* JRCADDIUSP */
15154 if ((ctx
->opcode
>> 4) & 1) {
15156 int imm
= extract32(ctx
->opcode
, 5, 5);
15157 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15158 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15161 rs
= extract32(ctx
->opcode
, 5, 5);
15162 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15174 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15175 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15176 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15177 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15181 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15184 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15188 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15189 int offset
= extract32(ctx
->opcode
, 4, 4);
15190 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15193 case JALRC16
: /* BREAK16, SDBBP16 */
15194 switch (ctx
->opcode
& 0x3f) {
15196 case JALRC16
+ 0x20:
15198 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15203 generate_exception(ctx
, EXCP_BREAK
);
15207 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15208 gen_helper_do_semihosting(cpu_env
);
15210 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15211 generate_exception(ctx
, EXCP_RI
);
15213 generate_exception(ctx
, EXCP_DBp
);
15220 generate_exception(ctx
, EXCP_RI
);
15225 static void gen_ldxs(DisasContext
*ctx
, int base
, int index
, int rd
)
15227 TCGv t0
= tcg_temp_new();
15228 TCGv t1
= tcg_temp_new();
15230 gen_load_gpr(t0
, base
);
15233 gen_load_gpr(t1
, index
);
15234 tcg_gen_shli_tl(t1
, t1
, 2);
15235 gen_op_addr_add(ctx
, t0
, t1
, t0
);
15238 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15239 gen_store_gpr(t1
, rd
);
15245 static void gen_ldst_pair(DisasContext
*ctx
, uint32_t opc
, int rd
,
15246 int base
, int16_t offset
)
15250 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
15251 generate_exception_end(ctx
, EXCP_RI
);
15255 t0
= tcg_temp_new();
15256 t1
= tcg_temp_new();
15258 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15263 generate_exception_end(ctx
, EXCP_RI
);
15266 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15267 gen_store_gpr(t1
, rd
);
15268 tcg_gen_movi_tl(t1
, 4);
15269 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15270 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15271 gen_store_gpr(t1
, rd
+ 1);
15274 gen_load_gpr(t1
, rd
);
15275 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15276 tcg_gen_movi_tl(t1
, 4);
15277 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15278 gen_load_gpr(t1
, rd
+ 1);
15279 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15281 #ifdef TARGET_MIPS64
15284 generate_exception_end(ctx
, EXCP_RI
);
15287 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15288 gen_store_gpr(t1
, rd
);
15289 tcg_gen_movi_tl(t1
, 8);
15290 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15291 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15292 gen_store_gpr(t1
, rd
+ 1);
15295 gen_load_gpr(t1
, rd
);
15296 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15297 tcg_gen_movi_tl(t1
, 8);
15298 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15299 gen_load_gpr(t1
, rd
+ 1);
15300 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15308 static void gen_sync(int stype
)
15310 TCGBar tcg_mo
= TCG_BAR_SC
;
15313 case 0x4: /* SYNC_WMB */
15314 tcg_mo
|= TCG_MO_ST_ST
;
15316 case 0x10: /* SYNC_MB */
15317 tcg_mo
|= TCG_MO_ALL
;
15319 case 0x11: /* SYNC_ACQUIRE */
15320 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
15322 case 0x12: /* SYNC_RELEASE */
15323 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
15325 case 0x13: /* SYNC_RMB */
15326 tcg_mo
|= TCG_MO_LD_LD
;
15329 tcg_mo
|= TCG_MO_ALL
;
15333 tcg_gen_mb(tcg_mo
);
15336 static void gen_pool32axf(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
15338 int extension
= (ctx
->opcode
>> 6) & 0x3f;
15339 int minor
= (ctx
->opcode
>> 12) & 0xf;
15340 uint32_t mips32_op
;
15342 switch (extension
) {
15344 mips32_op
= OPC_TEQ
;
15347 mips32_op
= OPC_TGE
;
15350 mips32_op
= OPC_TGEU
;
15353 mips32_op
= OPC_TLT
;
15356 mips32_op
= OPC_TLTU
;
15359 mips32_op
= OPC_TNE
;
15361 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15363 #ifndef CONFIG_USER_ONLY
15366 check_cp0_enabled(ctx
);
15368 /* Treat as NOP. */
15371 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15375 check_cp0_enabled(ctx
);
15377 TCGv t0
= tcg_temp_new();
15379 gen_load_gpr(t0
, rt
);
15380 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15386 switch (minor
& 3) {
15388 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15391 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15394 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15397 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15400 goto pool32axf_invalid
;
15404 switch (minor
& 3) {
15406 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15409 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15412 goto pool32axf_invalid
;
15418 check_insn(ctx
, ISA_MIPS32R6
);
15419 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15422 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15425 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15428 mips32_op
= OPC_CLO
;
15431 mips32_op
= OPC_CLZ
;
15433 check_insn(ctx
, ISA_MIPS32
);
15434 gen_cl(ctx
, mips32_op
, rt
, rs
);
15437 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15438 gen_rdhwr(ctx
, rt
, rs
, 0);
15441 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15444 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15445 mips32_op
= OPC_MULT
;
15448 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15449 mips32_op
= OPC_MULTU
;
15452 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15453 mips32_op
= OPC_DIV
;
15456 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15457 mips32_op
= OPC_DIVU
;
15460 check_insn(ctx
, ISA_MIPS32
);
15461 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15464 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15465 mips32_op
= OPC_MADD
;
15468 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15469 mips32_op
= OPC_MADDU
;
15472 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15473 mips32_op
= OPC_MSUB
;
15476 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15477 mips32_op
= OPC_MSUBU
;
15479 check_insn(ctx
, ISA_MIPS32
);
15480 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15483 goto pool32axf_invalid
;
15494 generate_exception_err(ctx
, EXCP_CpU
, 2);
15497 goto pool32axf_invalid
;
15502 case JALR
: /* JALRC */
15503 case JALR_HB
: /* JALRC_HB */
15504 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15505 /* JALRC, JALRC_HB */
15506 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15508 /* JALR, JALR_HB */
15509 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15510 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15515 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15516 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15517 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15520 goto pool32axf_invalid
;
15526 check_cp0_enabled(ctx
);
15527 check_insn(ctx
, ISA_MIPS32R2
);
15528 gen_load_srsgpr(rs
, rt
);
15531 check_cp0_enabled(ctx
);
15532 check_insn(ctx
, ISA_MIPS32R2
);
15533 gen_store_srsgpr(rs
, rt
);
15536 goto pool32axf_invalid
;
15539 #ifndef CONFIG_USER_ONLY
15543 mips32_op
= OPC_TLBP
;
15546 mips32_op
= OPC_TLBR
;
15549 mips32_op
= OPC_TLBWI
;
15552 mips32_op
= OPC_TLBWR
;
15555 mips32_op
= OPC_TLBINV
;
15558 mips32_op
= OPC_TLBINVF
;
15561 mips32_op
= OPC_WAIT
;
15564 mips32_op
= OPC_DERET
;
15567 mips32_op
= OPC_ERET
;
15569 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15572 goto pool32axf_invalid
;
15578 check_cp0_enabled(ctx
);
15580 TCGv t0
= tcg_temp_new();
15582 save_cpu_state(ctx
, 1);
15583 gen_helper_di(t0
, cpu_env
);
15584 gen_store_gpr(t0
, rs
);
15585 /* Stop translation as we may have switched the execution mode */
15586 ctx
->base
.is_jmp
= DISAS_STOP
;
15591 check_cp0_enabled(ctx
);
15593 TCGv t0
= tcg_temp_new();
15595 save_cpu_state(ctx
, 1);
15596 gen_helper_ei(t0
, cpu_env
);
15597 gen_store_gpr(t0
, rs
);
15598 /* DISAS_STOP isn't sufficient, we need to ensure we break out
15599 of translated code to check for pending interrupts. */
15600 gen_save_pc(ctx
->base
.pc_next
+ 4);
15601 ctx
->base
.is_jmp
= DISAS_EXIT
;
15606 goto pool32axf_invalid
;
15613 gen_sync(extract32(ctx
->opcode
, 16, 5));
15616 generate_exception_end(ctx
, EXCP_SYSCALL
);
15619 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15620 gen_helper_do_semihosting(cpu_env
);
15622 check_insn(ctx
, ISA_MIPS32
);
15623 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15624 generate_exception_end(ctx
, EXCP_RI
);
15626 generate_exception_end(ctx
, EXCP_DBp
);
15631 goto pool32axf_invalid
;
15635 switch (minor
& 3) {
15637 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15640 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15643 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15646 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15649 goto pool32axf_invalid
;
15653 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15656 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15659 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15662 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15665 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15668 goto pool32axf_invalid
;
15673 MIPS_INVAL("pool32axf");
15674 generate_exception_end(ctx
, EXCP_RI
);
15679 /* Values for microMIPS fmt field. Variable-width, depending on which
15680 formats the instruction supports. */
/*
 * gen_pool32fxf: decode the microMIPS POOL32Fxf FPU minor-opcode space
 * (CP1 control/data moves, unary FP arithmetic, format conversions and
 * conditional moves on FP condition codes).  Most arms only select the
 * equivalent MIPS32 opcode; the shared tails emit via gen_cp1()/gen_farith().
 *
 * NOTE(review): this extracted text is missing the original break
 * statements, goto labels and some case labels; comments describe the
 * surviving statement bodies.
 */
15699 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
/* Selector: bits 6..15 of the opcode; the FMT macros below fold the
   variable-width fmt field into the switch value. */
15701 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15702 uint32_t mips32_op
;
15704 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15705 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15706 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15708 switch (extension
) {
/* CP1 control/data register moves, emitted via gen_cp1(). */
15709 case FLOAT_1BIT_FMT(CFC1
, 0):
15710 mips32_op
= OPC_CFC1
;
15712 case FLOAT_1BIT_FMT(CTC1
, 0):
15713 mips32_op
= OPC_CTC1
;
15715 case FLOAT_1BIT_FMT(MFC1
, 0):
15716 mips32_op
= OPC_MFC1
;
15718 case FLOAT_1BIT_FMT(MTC1
, 0):
15719 mips32_op
= OPC_MTC1
;
15721 case FLOAT_1BIT_FMT(MFHC1
, 0):
15722 mips32_op
= OPC_MFHC1
;
15724 case FLOAT_1BIT_FMT(MTHC1
, 0):
15725 mips32_op
= OPC_MTHC1
;
15727 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15730 /* Reciprocal square root */
15731 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15732 mips32_op
= OPC_RSQRT_S
;
15734 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15735 mips32_op
= OPC_RSQRT_D
;
/* Square root */
15739 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15740 mips32_op
= OPC_SQRT_S
;
15742 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15743 mips32_op
= OPC_SQRT_D
;
/* Reciprocal */
15747 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15748 mips32_op
= OPC_RECIP_S
;
15750 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15751 mips32_op
= OPC_RECIP_D
;
/* Floating-point -> integer with explicit rounding: FLOOR/CEIL/TRUNC/ROUND
   to 64-bit (L) or 32-bit (W) integer. */
15755 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15756 mips32_op
= OPC_FLOOR_L_S
;
15758 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15759 mips32_op
= OPC_FLOOR_L_D
;
15761 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15762 mips32_op
= OPC_FLOOR_W_S
;
15764 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15765 mips32_op
= OPC_FLOOR_W_D
;
15769 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15770 mips32_op
= OPC_CEIL_L_S
;
15772 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15773 mips32_op
= OPC_CEIL_L_D
;
15775 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15776 mips32_op
= OPC_CEIL_W_S
;
15778 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15779 mips32_op
= OPC_CEIL_W_D
;
15783 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15784 mips32_op
= OPC_TRUNC_L_S
;
15786 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15787 mips32_op
= OPC_TRUNC_L_D
;
15789 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15790 mips32_op
= OPC_TRUNC_W_S
;
15792 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15793 mips32_op
= OPC_TRUNC_W_D
;
15797 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15798 mips32_op
= OPC_ROUND_L_S
;
15800 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15801 mips32_op
= OPC_ROUND_L_D
;
15803 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15804 mips32_op
= OPC_ROUND_W_S
;
15806 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15807 mips32_op
= OPC_ROUND_W_D
;
15810 /* Integer to floating-point conversion */
15811 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15812 mips32_op
= OPC_CVT_L_S
;
15814 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15815 mips32_op
= OPC_CVT_L_D
;
15817 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15818 mips32_op
= OPC_CVT_W_S
;
15820 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15821 mips32_op
= OPC_CVT_W_D
;
15824 /* Paired-single (PS) format conversions */
15825 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15826 mips32_op
= OPC_CVT_S_PL
;
15828 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15829 mips32_op
= OPC_CVT_S_PU
;
15831 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15832 mips32_op
= OPC_CVT_PW_PS
;
15834 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15835 mips32_op
= OPC_CVT_PS_PW
;
15838 /* Floating-point moves */
15839 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15840 mips32_op
= OPC_MOV_S
;
15842 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15843 mips32_op
= OPC_MOV_D
;
15845 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15846 mips32_op
= OPC_MOV_PS
;
15849 /* Absolute value */
15850 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15851 mips32_op
= OPC_ABS_S
;
15853 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15854 mips32_op
= OPC_ABS_D
;
15856 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15857 mips32_op
= OPC_ABS_PS
;
/* Negation */
15861 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15862 mips32_op
= OPC_NEG_S
;
15864 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15865 mips32_op
= OPC_NEG_D
;
15867 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15868 mips32_op
= OPC_NEG_PS
;
15871 /* Reciprocal square root step */
15872 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15873 mips32_op
= OPC_RSQRT1_S
;
15875 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15876 mips32_op
= OPC_RSQRT1_D
;
15878 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15879 mips32_op
= OPC_RSQRT1_PS
;
15882 /* Reciprocal step */
15883 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15884 mips32_op
= OPC_RECIP1_S
;
15886 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
/*
 * NOTE(review): BUG — the double-format RECIP1 case assigns
 * OPC_RECIP1_S; by symmetry with the RSQRT1 cases above it should
 * be OPC_RECIP1_D.  Looks like a copy/paste error; confirm against
 * upstream QEMU, where this was fixed.
 */
15887 mips32_op
= OPC_RECIP1_S
;
15889 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15890 mips32_op
= OPC_RECIP1_PS
;
15893 /* Conversions from double */
15894 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15895 mips32_op
= OPC_CVT_D_S
;
15897 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15898 mips32_op
= OPC_CVT_D_W
;
15900 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15901 mips32_op
= OPC_CVT_D_L
;
15904 /* Conversions from single */
15905 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15906 mips32_op
= OPC_CVT_S_D
;
15908 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15909 mips32_op
= OPC_CVT_S_W
;
15911 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15912 mips32_op
= OPC_CVT_S_L
;
/* Shared tail for the unary FP ops above: emit via gen_farith();
   -1 marks the unused third-operand slot. */
15914 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
15917 /* Conditional moves on floating-point codes */
15918 case COND_FLOAT_MOV(MOVT
, 0):
15919 case COND_FLOAT_MOV(MOVT
, 1):
15920 case COND_FLOAT_MOV(MOVT
, 2):
15921 case COND_FLOAT_MOV(MOVT
, 3):
15922 case COND_FLOAT_MOV(MOVT
, 4):
15923 case COND_FLOAT_MOV(MOVT
, 5):
15924 case COND_FLOAT_MOV(MOVT
, 6):
15925 case COND_FLOAT_MOV(MOVT
, 7):
/* MOVT/MOVF were removed in Release 6; condition code in bits 13..15,
   final argument selects move-on-true (1) vs move-on-false (0). */
15926 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15927 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
15929 case COND_FLOAT_MOV(MOVF
, 0):
15930 case COND_FLOAT_MOV(MOVF
, 1):
15931 case COND_FLOAT_MOV(MOVF
, 2):
15932 case COND_FLOAT_MOV(MOVF
, 3):
15933 case COND_FLOAT_MOV(MOVF
, 4):
15934 case COND_FLOAT_MOV(MOVF
, 5):
15935 case COND_FLOAT_MOV(MOVF
, 6):
15936 case COND_FLOAT_MOV(MOVF
, 7):
15937 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15938 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
/* Unknown POOL32Fxf encoding: reserved instruction exception. */
15941 MIPS_INVAL("pool32fxf");
15942 generate_exception_end(ctx
, EXCP_RI
);
15947 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15951 int rt
, rs
, rd
, rr
;
15953 uint32_t op
, minor
, minor2
, mips32_op
;
15954 uint32_t cond
, fmt
, cc
;
15956 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
15957 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
15959 rt
= (ctx
->opcode
>> 21) & 0x1f;
15960 rs
= (ctx
->opcode
>> 16) & 0x1f;
15961 rd
= (ctx
->opcode
>> 11) & 0x1f;
15962 rr
= (ctx
->opcode
>> 6) & 0x1f;
15963 imm
= (int16_t) ctx
->opcode
;
15965 op
= (ctx
->opcode
>> 26) & 0x3f;
15968 minor
= ctx
->opcode
& 0x3f;
15971 minor
= (ctx
->opcode
>> 6) & 0xf;
15974 mips32_op
= OPC_SLL
;
15977 mips32_op
= OPC_SRA
;
15980 mips32_op
= OPC_SRL
;
15983 mips32_op
= OPC_ROTR
;
15985 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
15988 check_insn(ctx
, ISA_MIPS32R6
);
15989 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
15992 check_insn(ctx
, ISA_MIPS32R6
);
15993 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
15996 check_insn(ctx
, ISA_MIPS32R6
);
15997 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
16000 goto pool32a_invalid
;
16004 minor
= (ctx
->opcode
>> 6) & 0xf;
16008 mips32_op
= OPC_ADD
;
16011 mips32_op
= OPC_ADDU
;
16014 mips32_op
= OPC_SUB
;
16017 mips32_op
= OPC_SUBU
;
16020 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16021 mips32_op
= OPC_MUL
;
16023 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
16027 mips32_op
= OPC_SLLV
;
16030 mips32_op
= OPC_SRLV
;
16033 mips32_op
= OPC_SRAV
;
16036 mips32_op
= OPC_ROTRV
;
16038 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
16040 /* Logical operations */
16042 mips32_op
= OPC_AND
;
16045 mips32_op
= OPC_OR
;
16048 mips32_op
= OPC_NOR
;
16051 mips32_op
= OPC_XOR
;
16053 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
16055 /* Set less than */
16057 mips32_op
= OPC_SLT
;
16060 mips32_op
= OPC_SLTU
;
16062 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
16065 goto pool32a_invalid
;
16069 minor
= (ctx
->opcode
>> 6) & 0xf;
16071 /* Conditional moves */
16072 case MOVN
: /* MUL */
16073 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16075 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
16078 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
16081 case MOVZ
: /* MUH */
16082 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16084 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
16087 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
16091 check_insn(ctx
, ISA_MIPS32R6
);
16092 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
16095 check_insn(ctx
, ISA_MIPS32R6
);
16096 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
16098 case LWXS
: /* DIV */
16099 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16101 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
16104 gen_ldxs(ctx
, rs
, rt
, rd
);
16108 check_insn(ctx
, ISA_MIPS32R6
);
16109 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
16112 check_insn(ctx
, ISA_MIPS32R6
);
16113 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
16116 check_insn(ctx
, ISA_MIPS32R6
);
16117 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
16120 goto pool32a_invalid
;
16124 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
16127 check_insn(ctx
, ISA_MIPS32R6
);
16128 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
16129 extract32(ctx
->opcode
, 9, 2));
16132 check_insn(ctx
, ISA_MIPS32R6
);
16133 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
16136 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
16139 gen_pool32axf(env
, ctx
, rt
, rs
);
16142 generate_exception_end(ctx
, EXCP_BREAK
);
16145 check_insn(ctx
, ISA_MIPS32R6
);
16146 generate_exception_end(ctx
, EXCP_RI
);
16150 MIPS_INVAL("pool32a");
16151 generate_exception_end(ctx
, EXCP_RI
);
16156 minor
= (ctx
->opcode
>> 12) & 0xf;
16159 check_cp0_enabled(ctx
);
16160 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16161 gen_cache_operation(ctx
, rt
, rs
, imm
);
16166 /* COP2: Not implemented. */
16167 generate_exception_err(ctx
, EXCP_CpU
, 2);
16169 #ifdef TARGET_MIPS64
16172 check_insn(ctx
, ISA_MIPS3
);
16173 check_mips_64(ctx
);
16178 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16180 #ifdef TARGET_MIPS64
16183 check_insn(ctx
, ISA_MIPS3
);
16184 check_mips_64(ctx
);
16189 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16192 MIPS_INVAL("pool32b");
16193 generate_exception_end(ctx
, EXCP_RI
);
16198 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16199 minor
= ctx
->opcode
& 0x3f;
16200 check_cp1_enabled(ctx
);
16203 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16204 mips32_op
= OPC_ALNV_PS
;
16207 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16208 mips32_op
= OPC_MADD_S
;
16211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16212 mips32_op
= OPC_MADD_D
;
16215 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16216 mips32_op
= OPC_MADD_PS
;
16219 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16220 mips32_op
= OPC_MSUB_S
;
16223 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16224 mips32_op
= OPC_MSUB_D
;
16227 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16228 mips32_op
= OPC_MSUB_PS
;
16231 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16232 mips32_op
= OPC_NMADD_S
;
16235 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16236 mips32_op
= OPC_NMADD_D
;
16239 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16240 mips32_op
= OPC_NMADD_PS
;
16243 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16244 mips32_op
= OPC_NMSUB_S
;
16247 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16248 mips32_op
= OPC_NMSUB_D
;
16251 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16252 mips32_op
= OPC_NMSUB_PS
;
16254 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16256 case CABS_COND_FMT
:
16257 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16258 cond
= (ctx
->opcode
>> 6) & 0xf;
16259 cc
= (ctx
->opcode
>> 13) & 0x7;
16260 fmt
= (ctx
->opcode
>> 10) & 0x3;
16263 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16266 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16269 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16272 goto pool32f_invalid
;
16276 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16277 cond
= (ctx
->opcode
>> 6) & 0xf;
16278 cc
= (ctx
->opcode
>> 13) & 0x7;
16279 fmt
= (ctx
->opcode
>> 10) & 0x3;
16282 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16285 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16288 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16291 goto pool32f_invalid
;
16295 check_insn(ctx
, ISA_MIPS32R6
);
16296 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16299 check_insn(ctx
, ISA_MIPS32R6
);
16300 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16303 gen_pool32fxf(ctx
, rt
, rs
);
16307 switch ((ctx
->opcode
>> 6) & 0x7) {
16309 mips32_op
= OPC_PLL_PS
;
16312 mips32_op
= OPC_PLU_PS
;
16315 mips32_op
= OPC_PUL_PS
;
16318 mips32_op
= OPC_PUU_PS
;
16321 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16322 mips32_op
= OPC_CVT_PS_S
;
16324 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16327 goto pool32f_invalid
;
16331 check_insn(ctx
, ISA_MIPS32R6
);
16332 switch ((ctx
->opcode
>> 9) & 0x3) {
16334 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16337 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16340 goto pool32f_invalid
;
16345 switch ((ctx
->opcode
>> 6) & 0x7) {
16347 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16348 mips32_op
= OPC_LWXC1
;
16351 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16352 mips32_op
= OPC_SWXC1
;
16355 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16356 mips32_op
= OPC_LDXC1
;
16359 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16360 mips32_op
= OPC_SDXC1
;
16363 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16364 mips32_op
= OPC_LUXC1
;
16367 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16368 mips32_op
= OPC_SUXC1
;
16370 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16373 goto pool32f_invalid
;
16377 check_insn(ctx
, ISA_MIPS32R6
);
16378 switch ((ctx
->opcode
>> 9) & 0x3) {
16380 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16383 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16386 goto pool32f_invalid
;
16391 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16392 fmt
= (ctx
->opcode
>> 9) & 0x3;
16393 switch ((ctx
->opcode
>> 6) & 0x7) {
16397 mips32_op
= OPC_RSQRT2_S
;
16400 mips32_op
= OPC_RSQRT2_D
;
16403 mips32_op
= OPC_RSQRT2_PS
;
16406 goto pool32f_invalid
;
16412 mips32_op
= OPC_RECIP2_S
;
16415 mips32_op
= OPC_RECIP2_D
;
16418 mips32_op
= OPC_RECIP2_PS
;
16421 goto pool32f_invalid
;
16425 mips32_op
= OPC_ADDR_PS
;
16428 mips32_op
= OPC_MULR_PS
;
16430 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16433 goto pool32f_invalid
;
16437 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16438 cc
= (ctx
->opcode
>> 13) & 0x7;
16439 fmt
= (ctx
->opcode
>> 9) & 0x3;
16440 switch ((ctx
->opcode
>> 6) & 0x7) {
16441 case MOVF_FMT
: /* RINT_FMT */
16442 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16446 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16449 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16452 goto pool32f_invalid
;
16458 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16461 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16465 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16468 goto pool32f_invalid
;
16472 case MOVT_FMT
: /* CLASS_FMT */
16473 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16477 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16480 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16483 goto pool32f_invalid
;
16489 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16492 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16496 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16499 goto pool32f_invalid
;
16504 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16507 goto pool32f_invalid
;
16510 #define FINSN_3ARG_SDPS(prfx) \
16511 switch ((ctx->opcode >> 8) & 0x3) { \
16513 mips32_op = OPC_##prfx##_S; \
16516 mips32_op = OPC_##prfx##_D; \
16518 case FMT_SDPS_PS: \
16520 mips32_op = OPC_##prfx##_PS; \
16523 goto pool32f_invalid; \
16526 check_insn(ctx
, ISA_MIPS32R6
);
16527 switch ((ctx
->opcode
>> 9) & 0x3) {
16529 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16532 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16535 goto pool32f_invalid
;
16539 check_insn(ctx
, ISA_MIPS32R6
);
16540 switch ((ctx
->opcode
>> 9) & 0x3) {
16542 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16545 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16548 goto pool32f_invalid
;
16552 /* regular FP ops */
16553 switch ((ctx
->opcode
>> 6) & 0x3) {
16555 FINSN_3ARG_SDPS(ADD
);
16558 FINSN_3ARG_SDPS(SUB
);
16561 FINSN_3ARG_SDPS(MUL
);
16564 fmt
= (ctx
->opcode
>> 8) & 0x3;
16566 mips32_op
= OPC_DIV_D
;
16567 } else if (fmt
== 0) {
16568 mips32_op
= OPC_DIV_S
;
16570 goto pool32f_invalid
;
16574 goto pool32f_invalid
;
16579 switch ((ctx
->opcode
>> 6) & 0x7) {
16580 case MOVN_FMT
: /* SELEQZ_FMT */
16581 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16583 switch ((ctx
->opcode
>> 9) & 0x3) {
16585 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16588 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16591 goto pool32f_invalid
;
16595 FINSN_3ARG_SDPS(MOVN
);
16599 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16600 FINSN_3ARG_SDPS(MOVN
);
16602 case MOVZ_FMT
: /* SELNEZ_FMT */
16603 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16605 switch ((ctx
->opcode
>> 9) & 0x3) {
16607 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16610 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16613 goto pool32f_invalid
;
16617 FINSN_3ARG_SDPS(MOVZ
);
16621 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16622 FINSN_3ARG_SDPS(MOVZ
);
16625 check_insn(ctx
, ISA_MIPS32R6
);
16626 switch ((ctx
->opcode
>> 9) & 0x3) {
16628 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16631 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16634 goto pool32f_invalid
;
16638 check_insn(ctx
, ISA_MIPS32R6
);
16639 switch ((ctx
->opcode
>> 9) & 0x3) {
16641 mips32_op
= OPC_MADDF_S
;
16644 mips32_op
= OPC_MADDF_D
;
16647 goto pool32f_invalid
;
16651 check_insn(ctx
, ISA_MIPS32R6
);
16652 switch ((ctx
->opcode
>> 9) & 0x3) {
16654 mips32_op
= OPC_MSUBF_S
;
16657 mips32_op
= OPC_MSUBF_D
;
16660 goto pool32f_invalid
;
16664 goto pool32f_invalid
;
16668 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16672 MIPS_INVAL("pool32f");
16673 generate_exception_end(ctx
, EXCP_RI
);
16677 generate_exception_err(ctx
, EXCP_CpU
, 1);
16681 minor
= (ctx
->opcode
>> 21) & 0x1f;
16684 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16685 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16688 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16689 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16690 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16693 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16694 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16695 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16698 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16699 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16702 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16703 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16704 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16707 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16708 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16709 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16712 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16713 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16716 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16717 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16721 case TLTI
: /* BC1EQZC */
16722 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16724 check_cp1_enabled(ctx
);
16725 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16728 mips32_op
= OPC_TLTI
;
16732 case TGEI
: /* BC1NEZC */
16733 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16735 check_cp1_enabled(ctx
);
16736 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16739 mips32_op
= OPC_TGEI
;
16744 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16745 mips32_op
= OPC_TLTIU
;
16748 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16749 mips32_op
= OPC_TGEIU
;
16751 case TNEI
: /* SYNCI */
16752 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16754 /* Break the TB to be able to sync copied instructions
16756 ctx
->base
.is_jmp
= DISAS_STOP
;
16759 mips32_op
= OPC_TNEI
;
16764 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16765 mips32_op
= OPC_TEQI
;
16767 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16772 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16773 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16774 4, rs
, 0, imm
<< 1, 0);
16775 /* Compact branches don't have a delay slot, so just let
16776 the normal delay slot handling take us to the branch
16780 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16781 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16784 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16785 /* Break the TB to be able to sync copied instructions
16787 ctx
->base
.is_jmp
= DISAS_STOP
;
16791 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16792 /* COP2: Not implemented. */
16793 generate_exception_err(ctx
, EXCP_CpU
, 2);
16796 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16797 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16800 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16801 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16804 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16805 mips32_op
= OPC_BC1FANY4
;
16808 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16809 mips32_op
= OPC_BC1TANY4
;
16812 check_insn(ctx
, ASE_MIPS3D
);
16815 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16816 check_cp1_enabled(ctx
);
16817 gen_compute_branch1(ctx
, mips32_op
,
16818 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16820 generate_exception_err(ctx
, EXCP_CpU
, 1);
16825 /* MIPS DSP: not implemented */
16828 MIPS_INVAL("pool32i");
16829 generate_exception_end(ctx
, EXCP_RI
);
16834 minor
= (ctx
->opcode
>> 12) & 0xf;
16835 offset
= sextract32(ctx
->opcode
, 0,
16836 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16839 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16840 mips32_op
= OPC_LWL
;
16843 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16844 mips32_op
= OPC_SWL
;
16847 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16848 mips32_op
= OPC_LWR
;
16851 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16852 mips32_op
= OPC_SWR
;
16854 #if defined(TARGET_MIPS64)
16856 check_insn(ctx
, ISA_MIPS3
);
16857 check_mips_64(ctx
);
16858 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16859 mips32_op
= OPC_LDL
;
16862 check_insn(ctx
, ISA_MIPS3
);
16863 check_mips_64(ctx
);
16864 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16865 mips32_op
= OPC_SDL
;
16868 check_insn(ctx
, ISA_MIPS3
);
16869 check_mips_64(ctx
);
16870 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16871 mips32_op
= OPC_LDR
;
16874 check_insn(ctx
, ISA_MIPS3
);
16875 check_mips_64(ctx
);
16876 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16877 mips32_op
= OPC_SDR
;
16880 check_insn(ctx
, ISA_MIPS3
);
16881 check_mips_64(ctx
);
16882 mips32_op
= OPC_LWU
;
16885 check_insn(ctx
, ISA_MIPS3
);
16886 check_mips_64(ctx
);
16887 mips32_op
= OPC_LLD
;
16891 mips32_op
= OPC_LL
;
16894 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16897 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16900 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, false);
16902 #if defined(TARGET_MIPS64)
16904 check_insn(ctx
, ISA_MIPS3
);
16905 check_mips_64(ctx
);
16906 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TEQ
, false);
16911 MIPS_INVAL("pool32c ld-eva");
16912 generate_exception_end(ctx
, EXCP_RI
);
16915 check_cp0_enabled(ctx
);
16917 minor2
= (ctx
->opcode
>> 9) & 0x7;
16918 offset
= sextract32(ctx
->opcode
, 0, 9);
16921 mips32_op
= OPC_LBUE
;
16924 mips32_op
= OPC_LHUE
;
16927 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16928 mips32_op
= OPC_LWLE
;
16931 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16932 mips32_op
= OPC_LWRE
;
16935 mips32_op
= OPC_LBE
;
16938 mips32_op
= OPC_LHE
;
16941 mips32_op
= OPC_LLE
;
16944 mips32_op
= OPC_LWE
;
16950 MIPS_INVAL("pool32c st-eva");
16951 generate_exception_end(ctx
, EXCP_RI
);
16954 check_cp0_enabled(ctx
);
16956 minor2
= (ctx
->opcode
>> 9) & 0x7;
16957 offset
= sextract32(ctx
->opcode
, 0, 9);
16960 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16961 mips32_op
= OPC_SWLE
;
16964 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16965 mips32_op
= OPC_SWRE
;
16968 /* Treat as no-op */
16969 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16970 /* hint codes 24-31 are reserved and signal RI */
16971 generate_exception(ctx
, EXCP_RI
);
16975 /* Treat as no-op */
16976 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16977 gen_cache_operation(ctx
, rt
, rs
, offset
);
16981 mips32_op
= OPC_SBE
;
16984 mips32_op
= OPC_SHE
;
16987 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, true);
16990 mips32_op
= OPC_SWE
;
16995 /* Treat as no-op */
16996 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
16997 /* hint codes 24-31 are reserved and signal RI */
16998 generate_exception(ctx
, EXCP_RI
);
17002 MIPS_INVAL("pool32c");
17003 generate_exception_end(ctx
, EXCP_RI
);
17007 case ADDI32
: /* AUI, LUI */
17008 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17010 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
17013 mips32_op
= OPC_ADDI
;
17018 mips32_op
= OPC_ADDIU
;
17020 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17023 /* Logical operations */
17025 mips32_op
= OPC_ORI
;
17028 mips32_op
= OPC_XORI
;
17031 mips32_op
= OPC_ANDI
;
17033 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17036 /* Set less than immediate */
17038 mips32_op
= OPC_SLTI
;
17041 mips32_op
= OPC_SLTIU
;
17043 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17046 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17047 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
17048 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
17049 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17051 case JALS32
: /* BOVC, BEQC, BEQZALC */
17052 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17055 mips32_op
= OPC_BOVC
;
17056 } else if (rs
< rt
&& rs
== 0) {
17058 mips32_op
= OPC_BEQZALC
;
17061 mips32_op
= OPC_BEQC
;
17063 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17066 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
17067 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
17068 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17071 case BEQ32
: /* BC */
17072 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17074 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
17075 sextract32(ctx
->opcode
<< 1, 0, 27));
17078 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
17081 case BNE32
: /* BALC */
17082 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17084 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
17085 sextract32(ctx
->opcode
<< 1, 0, 27));
17088 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
17091 case J32
: /* BGTZC, BLTZC, BLTC */
17092 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17093 if (rs
== 0 && rt
!= 0) {
17095 mips32_op
= OPC_BGTZC
;
17096 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17098 mips32_op
= OPC_BLTZC
;
17101 mips32_op
= OPC_BLTC
;
17103 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17106 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
17107 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17110 case JAL32
: /* BLEZC, BGEZC, BGEC */
17111 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17112 if (rs
== 0 && rt
!= 0) {
17114 mips32_op
= OPC_BLEZC
;
17115 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17117 mips32_op
= OPC_BGEZC
;
17120 mips32_op
= OPC_BGEC
;
17122 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17125 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
17126 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17127 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17130 /* Floating point (COP1) */
17132 mips32_op
= OPC_LWC1
;
17135 mips32_op
= OPC_LDC1
;
17138 mips32_op
= OPC_SWC1
;
17141 mips32_op
= OPC_SDC1
;
17143 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
17145 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17146 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17147 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17148 switch ((ctx
->opcode
>> 16) & 0x1f) {
17157 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17160 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17163 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17173 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17176 generate_exception(ctx
, EXCP_RI
);
17181 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17182 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17184 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17187 case BNVC
: /* BNEC, BNEZALC */
17188 check_insn(ctx
, ISA_MIPS32R6
);
17191 mips32_op
= OPC_BNVC
;
17192 } else if (rs
< rt
&& rs
== 0) {
17194 mips32_op
= OPC_BNEZALC
;
17197 mips32_op
= OPC_BNEC
;
17199 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17201 case R6_BNEZC
: /* JIALC */
17202 check_insn(ctx
, ISA_MIPS32R6
);
17205 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17206 sextract32(ctx
->opcode
<< 1, 0, 22));
17209 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17212 case R6_BEQZC
: /* JIC */
17213 check_insn(ctx
, ISA_MIPS32R6
);
17216 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17217 sextract32(ctx
->opcode
<< 1, 0, 22));
17220 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17223 case BLEZALC
: /* BGEZALC, BGEUC */
17224 check_insn(ctx
, ISA_MIPS32R6
);
17225 if (rs
== 0 && rt
!= 0) {
17227 mips32_op
= OPC_BLEZALC
;
17228 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17230 mips32_op
= OPC_BGEZALC
;
17233 mips32_op
= OPC_BGEUC
;
17235 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17237 case BGTZALC
: /* BLTZALC, BLTUC */
17238 check_insn(ctx
, ISA_MIPS32R6
);
17239 if (rs
== 0 && rt
!= 0) {
17241 mips32_op
= OPC_BGTZALC
;
17242 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17244 mips32_op
= OPC_BLTZALC
;
17247 mips32_op
= OPC_BLTUC
;
17249 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17251 /* Loads and stores */
17253 mips32_op
= OPC_LB
;
17256 mips32_op
= OPC_LBU
;
17259 mips32_op
= OPC_LH
;
17262 mips32_op
= OPC_LHU
;
17265 mips32_op
= OPC_LW
;
17267 #ifdef TARGET_MIPS64
17269 check_insn(ctx
, ISA_MIPS3
);
17270 check_mips_64(ctx
);
17271 mips32_op
= OPC_LD
;
17274 check_insn(ctx
, ISA_MIPS3
);
17275 check_mips_64(ctx
);
17276 mips32_op
= OPC_SD
;
17280 mips32_op
= OPC_SB
;
17283 mips32_op
= OPC_SH
;
17286 mips32_op
= OPC_SW
;
17289 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17292 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17295 generate_exception_end(ctx
, EXCP_RI
);
17300 static int decode_micromips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
17304 /* make sure instructions are on a halfword boundary */
17305 if (ctx
->base
.pc_next
& 0x1) {
17306 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
17307 generate_exception_end(ctx
, EXCP_AdEL
);
17311 op
= (ctx
->opcode
>> 10) & 0x3f;
17312 /* Enforce properly-sized instructions in a delay slot */
17313 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
17314 switch (op
& 0x7) { /* MSB-3..MSB-5 */
17316 /* POOL32A, POOL32B, POOL32I, POOL32C */
17318 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
17320 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
17322 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
17324 /* LB32, LH32, LWC132, LDC132, LW32 */
17325 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
17326 generate_exception_end(ctx
, EXCP_RI
);
17331 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
17333 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
17335 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
17336 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
17337 generate_exception_end(ctx
, EXCP_RI
);
17347 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17348 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17349 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17352 switch (ctx
->opcode
& 0x1) {
17360 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17361 /* In the Release 6 the register number location in
17362 * the instruction encoding has changed.
17364 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17366 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17372 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17373 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17374 int amount
= (ctx
->opcode
>> 1) & 0x7;
17376 amount
= amount
== 0 ? 8 : amount
;
17378 switch (ctx
->opcode
& 0x1) {
17387 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17391 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17392 gen_pool16c_r6_insn(ctx
);
17394 gen_pool16c_insn(ctx
);
17399 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17400 int rb
= 28; /* GP */
17401 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17403 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17407 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17408 if (ctx
->opcode
& 1) {
17409 generate_exception_end(ctx
, EXCP_RI
);
17412 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17413 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17414 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17415 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17420 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17421 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17422 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17423 offset
= (offset
== 0xf ? -1 : offset
);
17425 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17430 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17431 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17432 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17434 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17439 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17440 int rb
= 29; /* SP */
17441 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17443 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17448 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17449 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17450 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17452 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17457 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17458 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17459 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17461 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17466 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17467 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17468 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17470 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17475 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17476 int rb
= 29; /* SP */
17477 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17479 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17484 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17485 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17486 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17488 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17493 int rd
= uMIPS_RD5(ctx
->opcode
);
17494 int rs
= uMIPS_RS5(ctx
->opcode
);
17496 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17503 switch (ctx
->opcode
& 0x1) {
17513 switch (ctx
->opcode
& 0x1) {
17518 gen_addiur1sp(ctx
);
17522 case B16
: /* BC16 */
17523 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17524 sextract32(ctx
->opcode
, 0, 10) << 1,
17525 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17527 case BNEZ16
: /* BNEZC16 */
17528 case BEQZ16
: /* BEQZC16 */
17529 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17530 mmreg(uMIPS_RD(ctx
->opcode
)),
17531 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17532 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17537 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17538 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17540 imm
= (imm
== 0x7f ? -1 : imm
);
17541 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17547 generate_exception_end(ctx
, EXCP_RI
);
17550 decode_micromips32_opc(env
, ctx
);
17563 /* MAJOR, P16, and P32 pools opcodes */
17567 NM_MOVE_BALC
= 0x02,
17575 NM_P16_SHIFT
= 0x0c,
17593 NM_P_LS_U12
= 0x21,
17603 NM_P16_ADDU
= 0x2c,
17617 NM_MOVEPREV
= 0x3f,
17620 /* POOL32A instruction pool */
17622 NM_POOL32A0
= 0x00,
17623 NM_SPECIAL2
= 0x01,
17626 NM_POOL32A5
= 0x05,
17627 NM_POOL32A7
= 0x07,
17630 /* P.GP.W instruction pool */
17632 NM_ADDIUGP_W
= 0x00,
17637 /* P48I instruction pool */
17641 NM_ADDIUGP48
= 0x02,
17642 NM_ADDIUPC48
= 0x03,
17647 /* P.U12 instruction pool */
17656 NM_ADDIUNEG
= 0x08,
17663 /* POOL32F instruction pool */
17665 NM_POOL32F_0
= 0x00,
17666 NM_POOL32F_3
= 0x03,
17667 NM_POOL32F_5
= 0x05,
17670 /* POOL32S instruction pool */
17672 NM_POOL32S_0
= 0x00,
17673 NM_POOL32S_4
= 0x04,
17676 /* P.LUI instruction pool */
17682 /* P.GP.BH instruction pool */
17687 NM_ADDIUGP_B
= 0x03,
17690 NM_P_GP_CP1
= 0x06,
17693 /* P.LS.U12 instruction pool */
17698 NM_P_PREFU12
= 0x03,
17711 /* P.LS.S9 instruction pool */
17717 NM_P_LS_UAWM
= 0x05,
17720 /* P.BAL instruction pool */
17726 /* P.J instruction pool */
17729 NM_JALRC_HB
= 0x01,
17730 NM_P_BALRSC
= 0x08,
17733 /* P.BR1 instruction pool */
17741 /* P.BR2 instruction pool */
17748 /* P.BRI instruction pool */
17760 /* P16.SHIFT instruction pool */
17766 /* POOL16C instruction pool */
17768 NM_POOL16C_0
= 0x00,
17772 /* P16.A1 instruction pool */
17774 NM_ADDIUR1SP
= 0x01,
17777 /* P16.A2 instruction pool */
17780 NM_P_ADDIURS5
= 0x01,
17783 /* P16.ADDU instruction pool */
17789 /* P16.SR instruction pool */
17792 NM_RESTORE_JRC16
= 0x01,
17795 /* P16.4X4 instruction pool */
17801 /* P16.LB instruction pool */
17808 /* P16.LH instruction pool */
17815 /* P.RI instruction pool */
17818 NM_P_SYSCALL
= 0x01,
17823 /* POOL32A0 instruction pool */
17858 NM_D_E_MT_VPE
= 0x56,
17866 /* CRC32 instruction pool */
17876 /* POOL32A5 instruction pool */
17878 NM_CMP_EQ_PH
= 0x00,
17879 NM_CMP_LT_PH
= 0x08,
17880 NM_CMP_LE_PH
= 0x10,
17881 NM_CMPGU_EQ_QB
= 0x18,
17882 NM_CMPGU_LT_QB
= 0x20,
17883 NM_CMPGU_LE_QB
= 0x28,
17884 NM_CMPGDU_EQ_QB
= 0x30,
17885 NM_CMPGDU_LT_QB
= 0x38,
17886 NM_CMPGDU_LE_QB
= 0x40,
17887 NM_CMPU_EQ_QB
= 0x48,
17888 NM_CMPU_LT_QB
= 0x50,
17889 NM_CMPU_LE_QB
= 0x58,
17890 NM_ADDQ_S_W
= 0x60,
17891 NM_SUBQ_S_W
= 0x68,
17895 NM_ADDQ_S_PH
= 0x01,
17896 NM_ADDQH_R_PH
= 0x09,
17897 NM_ADDQH_R_W
= 0x11,
17898 NM_ADDU_S_QB
= 0x19,
17899 NM_ADDU_S_PH
= 0x21,
17900 NM_ADDUH_R_QB
= 0x29,
17901 NM_SHRAV_R_PH
= 0x31,
17902 NM_SHRAV_R_QB
= 0x39,
17903 NM_SUBQ_S_PH
= 0x41,
17904 NM_SUBQH_R_PH
= 0x49,
17905 NM_SUBQH_R_W
= 0x51,
17906 NM_SUBU_S_QB
= 0x59,
17907 NM_SUBU_S_PH
= 0x61,
17908 NM_SUBUH_R_QB
= 0x69,
17909 NM_SHLLV_S_PH
= 0x71,
17910 NM_PRECR_SRA_R_PH_W
= 0x79,
17912 NM_MULEU_S_PH_QBL
= 0x12,
17913 NM_MULEU_S_PH_QBR
= 0x1a,
17914 NM_MULQ_RS_PH
= 0x22,
17915 NM_MULQ_S_PH
= 0x2a,
17916 NM_MULQ_RS_W
= 0x32,
17917 NM_MULQ_S_W
= 0x3a,
17920 NM_SHRAV_R_W
= 0x5a,
17921 NM_SHRLV_PH
= 0x62,
17922 NM_SHRLV_QB
= 0x6a,
17923 NM_SHLLV_QB
= 0x72,
17924 NM_SHLLV_S_W
= 0x7a,
17928 NM_MULEQ_S_W_PHL
= 0x04,
17929 NM_MULEQ_S_W_PHR
= 0x0c,
17931 NM_MUL_S_PH
= 0x05,
17932 NM_PRECR_QB_PH
= 0x0d,
17933 NM_PRECRQ_QB_PH
= 0x15,
17934 NM_PRECRQ_PH_W
= 0x1d,
17935 NM_PRECRQ_RS_PH_W
= 0x25,
17936 NM_PRECRQU_S_QB_PH
= 0x2d,
17937 NM_PACKRL_PH
= 0x35,
17941 NM_SHRA_R_W
= 0x5e,
17942 NM_SHRA_R_PH
= 0x66,
17943 NM_SHLL_S_PH
= 0x76,
17944 NM_SHLL_S_W
= 0x7e,
17949 /* POOL32A7 instruction pool */
17954 NM_POOL32AXF
= 0x07,
17957 /* P.SR instruction pool */
17963 /* P.SHIFT instruction pool */
17971 /* P.ROTX instruction pool */
17976 /* P.INS instruction pool */
17981 /* P.EXT instruction pool */
17986 /* POOL32F_0 (fmt) instruction pool */
17991 NM_SELEQZ_S
= 0x07,
17992 NM_SELEQZ_D
= 0x47,
17996 NM_SELNEZ_S
= 0x0f,
17997 NM_SELNEZ_D
= 0x4f,
18012 /* POOL32F_3 instruction pool */
18016 NM_MINA_FMT
= 0x04,
18017 NM_MAXA_FMT
= 0x05,
18018 NM_POOL32FXF
= 0x07,
18021 /* POOL32F_5 instruction pool */
18023 NM_CMP_CONDN_S
= 0x00,
18024 NM_CMP_CONDN_D
= 0x02,
18027 /* P.GP.LH instruction pool */
18033 /* P.GP.SH instruction pool */
18038 /* P.GP.CP1 instruction pool */
18046 /* P.LS.S0 instruction pool */
18063 NM_P_PREFS9
= 0x03,
18069 /* P.LS.S1 instruction pool */
18071 NM_ASET_ACLR
= 0x02,
18079 /* P.LS.E0 instruction pool */
18095 /* P.PREFE instruction pool */
18101 /* P.LLE instruction pool */
18107 /* P.SCE instruction pool */
18113 /* P.LS.WM instruction pool */
18119 /* P.LS.UAWM instruction pool */
18125 /* P.BR3A instruction pool */
18131 NM_BPOSGE32C
= 0x04,
18134 /* P16.RI instruction pool */
18136 NM_P16_SYSCALL
= 0x01,
18141 /* POOL16C_0 instruction pool */
18143 NM_POOL16C_00
= 0x00,
18146 /* P16.JRC instruction pool */
18152 /* P.SYSCALL instruction pool */
18158 /* P.TRAP instruction pool */
18164 /* P.CMOVE instruction pool */
18170 /* POOL32Axf instruction pool */
18172 NM_POOL32AXF_1
= 0x01,
18173 NM_POOL32AXF_2
= 0x02,
18174 NM_POOL32AXF_4
= 0x04,
18175 NM_POOL32AXF_5
= 0x05,
18176 NM_POOL32AXF_7
= 0x07,
18179 /* POOL32Axf_1 instruction pool */
18181 NM_POOL32AXF_1_0
= 0x00,
18182 NM_POOL32AXF_1_1
= 0x01,
18183 NM_POOL32AXF_1_3
= 0x03,
18184 NM_POOL32AXF_1_4
= 0x04,
18185 NM_POOL32AXF_1_5
= 0x05,
18186 NM_POOL32AXF_1_7
= 0x07,
18189 /* POOL32Axf_2 instruction pool */
18191 NM_POOL32AXF_2_0_7
= 0x00,
18192 NM_POOL32AXF_2_8_15
= 0x01,
18193 NM_POOL32AXF_2_16_23
= 0x02,
18194 NM_POOL32AXF_2_24_31
= 0x03,
18197 /* POOL32Axf_7 instruction pool */
18199 NM_SHRA_R_QB
= 0x0,
18204 /* POOL32Axf_1_0 instruction pool */
18212 /* POOL32Axf_1_1 instruction pool */
18218 /* POOL32Axf_1_3 instruction pool */
18226 /* POOL32Axf_1_4 instruction pool */
18232 /* POOL32Axf_1_5 instruction pool */
18234 NM_MAQ_S_W_PHR
= 0x0,
18235 NM_MAQ_S_W_PHL
= 0x1,
18236 NM_MAQ_SA_W_PHR
= 0x2,
18237 NM_MAQ_SA_W_PHL
= 0x3,
18240 /* POOL32Axf_1_7 instruction pool */
18244 NM_EXTR_RS_W
= 0x2,
18248 /* POOL32Axf_2_0_7 instruction pool */
18251 NM_DPAQ_S_W_PH
= 0x1,
18253 NM_DPSQ_S_W_PH
= 0x3,
18260 /* POOL32Axf_2_8_15 instruction pool */
18262 NM_DPAX_W_PH
= 0x0,
18263 NM_DPAQ_SA_L_W
= 0x1,
18264 NM_DPSX_W_PH
= 0x2,
18265 NM_DPSQ_SA_L_W
= 0x3,
18268 NM_EXTRV_R_W
= 0x7,
18271 /* POOL32Axf_2_16_23 instruction pool */
18273 NM_DPAU_H_QBL
= 0x0,
18274 NM_DPAQX_S_W_PH
= 0x1,
18275 NM_DPSU_H_QBL
= 0x2,
18276 NM_DPSQX_S_W_PH
= 0x3,
18279 NM_MULSA_W_PH
= 0x6,
18280 NM_EXTRV_RS_W
= 0x7,
18283 /* POOL32Axf_2_24_31 instruction pool */
18285 NM_DPAU_H_QBR
= 0x0,
18286 NM_DPAQX_SA_W_PH
= 0x1,
18287 NM_DPSU_H_QBR
= 0x2,
18288 NM_DPSQX_SA_W_PH
= 0x3,
18291 NM_MULSAQ_S_W_PH
= 0x6,
18292 NM_EXTRV_S_H
= 0x7,
18295 /* POOL32Axf_{4, 5} instruction pool */
18314 /* nanoMIPS DSP instructions */
18315 NM_ABSQ_S_QB
= 0x00,
18316 NM_ABSQ_S_PH
= 0x08,
18317 NM_ABSQ_S_W
= 0x10,
18318 NM_PRECEQ_W_PHL
= 0x28,
18319 NM_PRECEQ_W_PHR
= 0x30,
18320 NM_PRECEQU_PH_QBL
= 0x38,
18321 NM_PRECEQU_PH_QBR
= 0x48,
18322 NM_PRECEU_PH_QBL
= 0x58,
18323 NM_PRECEU_PH_QBR
= 0x68,
18324 NM_PRECEQU_PH_QBLA
= 0x39,
18325 NM_PRECEQU_PH_QBRA
= 0x49,
18326 NM_PRECEU_PH_QBLA
= 0x59,
18327 NM_PRECEU_PH_QBRA
= 0x69,
18328 NM_REPLV_PH
= 0x01,
18329 NM_REPLV_QB
= 0x09,
18332 NM_RADDU_W_QB
= 0x78,
18338 /* PP.SR instruction pool */
18342 NM_RESTORE_JRC
= 0x03,
18345 /* P.SR.F instruction pool */
18348 NM_RESTOREF
= 0x01,
18351 /* P16.SYSCALL instruction pool */
18353 NM_SYSCALL16
= 0x00,
18354 NM_HYPCALL16
= 0x01,
18357 /* POOL16C_00 instruction pool */
18365 /* PP.LSX and PP.LSXS instruction pool */
18403 /* ERETx instruction pool */
18409 /* POOL32FxF_{0, 1} insturction pool */
18418 NM_CVT_S_PL
= 0x84,
18419 NM_CVT_S_PU
= 0xa4,
18421 NM_CVT_L_S
= 0x004,
18422 NM_CVT_L_D
= 0x104,
18423 NM_CVT_W_S
= 0x024,
18424 NM_CVT_W_D
= 0x124,
18426 NM_RSQRT_S
= 0x008,
18427 NM_RSQRT_D
= 0x108,
18432 NM_RECIP_S
= 0x048,
18433 NM_RECIP_D
= 0x148,
18435 NM_FLOOR_L_S
= 0x00c,
18436 NM_FLOOR_L_D
= 0x10c,
18438 NM_FLOOR_W_S
= 0x02c,
18439 NM_FLOOR_W_D
= 0x12c,
18441 NM_CEIL_L_S
= 0x04c,
18442 NM_CEIL_L_D
= 0x14c,
18443 NM_CEIL_W_S
= 0x06c,
18444 NM_CEIL_W_D
= 0x16c,
18445 NM_TRUNC_L_S
= 0x08c,
18446 NM_TRUNC_L_D
= 0x18c,
18447 NM_TRUNC_W_S
= 0x0ac,
18448 NM_TRUNC_W_D
= 0x1ac,
18449 NM_ROUND_L_S
= 0x0cc,
18450 NM_ROUND_L_D
= 0x1cc,
18451 NM_ROUND_W_S
= 0x0ec,
18452 NM_ROUND_W_D
= 0x1ec,
18460 NM_CVT_D_S
= 0x04d,
18461 NM_CVT_D_W
= 0x0cd,
18462 NM_CVT_D_L
= 0x14d,
18463 NM_CVT_S_D
= 0x06d,
18464 NM_CVT_S_W
= 0x0ed,
18465 NM_CVT_S_L
= 0x16d,
18468 /* P.LL instruction pool */
18474 /* P.SC instruction pool */
18480 /* P.DVP instruction pool */
18489 * nanoMIPS decoding engine
18494 /* extraction utilities */
18496 #define NANOMIPS_EXTRACT_RT3(op) ((op >> 7) & 0x7)
18497 #define NANOMIPS_EXTRACT_RS3(op) ((op >> 4) & 0x7)
18498 #define NANOMIPS_EXTRACT_RD3(op) ((op >> 1) & 0x7)
18499 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18500 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3').
 *
 * The 3-bit field selects $16-$19 (s0-s3) for codes 0-3 and
 * $4-$7 (a0-a3) for codes 4-7.
 */
static inline int decode_gpr_gpr3(int r)
{
    int enc = r & 0x7;

    /* Codes below 4 map into the saved-register range, the rest are a0-a3. */
    return (enc < 4) ? 16 + enc : enc;
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store').
 *
 * Identical to the 'gpr3' mapping except that code 0 selects $0 (zero)
 * instead of $16, which lets a store encode the zero register.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    int enc = r & 0x7;

    if (enc == 0) {
        return 0;
    }
    return (enc < 4) ? 16 + enc : enc;
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4').
 *
 * The 4-bit field maps codes 0-3 to $8-$11, codes 4-7 to $4-$7 and
 * codes 8-15 to $16-$23.
 */
static inline int decode_gpr_gpr4(int r)
{
    int enc = r & 0xf;

    /* Only the a0-a3 band (codes 4-7) decodes to itself. */
    return (enc >= 4 && enc < 8) ? enc : enc + 8;
}
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero').
 *
 * Same mapping as 'gpr4' except that code 3 selects $0 (the zero
 * register) instead of $11.
 */
static inline int decode_gpr_gpr4_zero(int r)
{
    int enc = r & 0xf;

    if (enc == 3) {
        return 0;
    }
    return (enc >= 4 && enc < 8) ? enc : enc + 8;
}
18537 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18539 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18542 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18543 uint8_t gp
, uint16_t u
)
18546 TCGv va
= tcg_temp_new();
18547 TCGv t0
= tcg_temp_new();
18549 while (counter
!= count
) {
18550 bool use_gp
= gp
&& (counter
== count
- 1);
18551 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18552 int this_offset
= -((counter
+ 1) << 2);
18553 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18554 gen_load_gpr(t0
, this_rt
);
18555 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18556 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18560 /* adjust stack pointer */
18561 gen_adjust_sp(ctx
, -u
);
18567 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18568 uint8_t gp
, uint16_t u
)
18571 TCGv va
= tcg_temp_new();
18572 TCGv t0
= tcg_temp_new();
18574 while (counter
!= count
) {
18575 bool use_gp
= gp
&& (counter
== count
- 1);
18576 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18577 int this_offset
= u
- ((counter
+ 1) << 2);
18578 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18579 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18580 ctx
->default_tcg_memop_mask
);
18581 tcg_gen_ext32s_tl(t0
, t0
);
18582 gen_store_gpr(t0
, this_rt
);
18586 /* adjust stack pointer */
18587 gen_adjust_sp(ctx
, u
);
18593 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18595 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
18596 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
18598 switch (extract32(ctx
->opcode
, 2, 2)) {
18600 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
18603 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
18606 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
18609 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18614 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18616 int rt
= extract32(ctx
->opcode
, 21, 5);
18617 int rs
= extract32(ctx
->opcode
, 16, 5);
18618 int rd
= extract32(ctx
->opcode
, 11, 5);
18620 switch (extract32(ctx
->opcode
, 3, 7)) {
18622 switch (extract32(ctx
->opcode
, 10, 1)) {
18625 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18629 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18635 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18639 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18642 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18645 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18648 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18651 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18654 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18657 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18660 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18664 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18667 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18670 switch (extract32(ctx
->opcode
, 10, 1)) {
18672 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18675 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18680 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18683 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18686 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18689 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18692 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18697 #ifndef CONFIG_USER_ONLY
18698 TCGv t0
= tcg_temp_new();
18699 switch (extract32(ctx
->opcode
, 10, 1)) {
18702 check_cp0_enabled(ctx
);
18703 gen_helper_dvp(t0
, cpu_env
);
18704 gen_store_gpr(t0
, rt
);
18709 check_cp0_enabled(ctx
);
18710 gen_helper_evp(t0
, cpu_env
);
18711 gen_store_gpr(t0
, rt
);
18718 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18723 TCGv t0
= tcg_temp_new();
18724 TCGv t1
= tcg_temp_new();
18725 TCGv t2
= tcg_temp_new();
18727 gen_load_gpr(t1
, rs
);
18728 gen_load_gpr(t2
, rt
);
18729 tcg_gen_add_tl(t0
, t1
, t2
);
18730 tcg_gen_ext32s_tl(t0
, t0
);
18731 tcg_gen_xor_tl(t1
, t1
, t2
);
18732 tcg_gen_xor_tl(t2
, t0
, t2
);
18733 tcg_gen_andc_tl(t1
, t2
, t1
);
18735 /* operands of same sign, result different sign */
18736 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18737 gen_store_gpr(t0
, rd
);
18745 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18748 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18751 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18754 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18757 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18760 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18763 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18766 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18768 #ifndef CONFIG_USER_ONLY
18770 check_cp0_enabled(ctx
);
18772 /* Treat as NOP. */
18775 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18778 check_cp0_enabled(ctx
);
18780 TCGv t0
= tcg_temp_new();
18782 gen_load_gpr(t0
, rt
);
18783 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18787 case NM_D_E_MT_VPE
:
18789 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18790 TCGv t0
= tcg_temp_new();
18797 gen_helper_dmt(t0
);
18798 gen_store_gpr(t0
, rt
);
18799 } else if (rs
== 0) {
18802 gen_helper_dvpe(t0
, cpu_env
);
18803 gen_store_gpr(t0
, rt
);
18805 generate_exception_end(ctx
, EXCP_RI
);
18812 gen_helper_emt(t0
);
18813 gen_store_gpr(t0
, rt
);
18814 } else if (rs
== 0) {
18817 gen_helper_evpe(t0
, cpu_env
);
18818 gen_store_gpr(t0
, rt
);
18820 generate_exception_end(ctx
, EXCP_RI
);
18831 TCGv t0
= tcg_temp_new();
18832 TCGv t1
= tcg_temp_new();
18834 gen_load_gpr(t0
, rt
);
18835 gen_load_gpr(t1
, rs
);
18836 gen_helper_fork(t0
, t1
);
18843 check_cp0_enabled(ctx
);
18845 /* Treat as NOP. */
18848 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18849 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18853 check_cp0_enabled(ctx
);
18854 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18855 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18860 TCGv t0
= tcg_temp_new();
18862 gen_load_gpr(t0
, rs
);
18863 gen_helper_yield(t0
, cpu_env
, t0
);
18864 gen_store_gpr(t0
, rt
);
18870 generate_exception_end(ctx
, EXCP_RI
);
18876 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18877 int ret
, int v1
, int v2
)
18883 t0
= tcg_temp_new_i32();
18885 v0_t
= tcg_temp_new();
18886 v1_t
= tcg_temp_new();
18888 tcg_gen_movi_i32(t0
, v2
>> 3);
18890 gen_load_gpr(v0_t
, ret
);
18891 gen_load_gpr(v1_t
, v1
);
18894 case NM_MAQ_S_W_PHR
:
18896 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18898 case NM_MAQ_S_W_PHL
:
18900 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18902 case NM_MAQ_SA_W_PHR
:
18904 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18906 case NM_MAQ_SA_W_PHL
:
18908 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18911 generate_exception_end(ctx
, EXCP_RI
);
18915 tcg_temp_free_i32(t0
);
18917 tcg_temp_free(v0_t
);
18918 tcg_temp_free(v1_t
);
18922 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18923 int ret
, int v1
, int v2
)
18926 TCGv t0
= tcg_temp_new();
18927 TCGv t1
= tcg_temp_new();
18928 TCGv v0_t
= tcg_temp_new();
18930 gen_load_gpr(v0_t
, v1
);
18933 case NM_POOL32AXF_1_0
:
18935 switch (extract32(ctx
->opcode
, 12, 2)) {
18937 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
18940 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
18943 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
18946 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
18950 case NM_POOL32AXF_1_1
:
18952 switch (extract32(ctx
->opcode
, 12, 2)) {
18954 tcg_gen_movi_tl(t0
, v2
);
18955 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
18958 tcg_gen_movi_tl(t0
, v2
>> 3);
18959 gen_helper_shilo(t0
, v0_t
, cpu_env
);
18962 generate_exception_end(ctx
, EXCP_RI
);
18966 case NM_POOL32AXF_1_3
:
18968 imm
= extract32(ctx
->opcode
, 14, 7);
18969 switch (extract32(ctx
->opcode
, 12, 2)) {
18971 tcg_gen_movi_tl(t0
, imm
);
18972 gen_helper_rddsp(t0
, t0
, cpu_env
);
18973 gen_store_gpr(t0
, ret
);
18976 gen_load_gpr(t0
, ret
);
18977 tcg_gen_movi_tl(t1
, imm
);
18978 gen_helper_wrdsp(t0
, t1
, cpu_env
);
18981 tcg_gen_movi_tl(t0
, v2
>> 3);
18982 tcg_gen_movi_tl(t1
, v1
);
18983 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
18984 gen_store_gpr(t0
, ret
);
18987 tcg_gen_movi_tl(t0
, v2
>> 3);
18988 tcg_gen_movi_tl(t1
, v1
);
18989 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
18990 gen_store_gpr(t0
, ret
);
18994 case NM_POOL32AXF_1_4
:
18996 tcg_gen_movi_tl(t0
, v2
>> 2);
18997 switch (extract32(ctx
->opcode
, 12, 1)) {
18999 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
19000 gen_store_gpr(t0
, ret
);
19003 gen_helper_shrl_qb(t0
, t0
, v0_t
);
19004 gen_store_gpr(t0
, ret
);
19008 case NM_POOL32AXF_1_5
:
19009 opc
= extract32(ctx
->opcode
, 12, 2);
19010 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
19012 case NM_POOL32AXF_1_7
:
19014 tcg_gen_movi_tl(t0
, v2
>> 3);
19015 tcg_gen_movi_tl(t1
, v1
);
19016 switch (extract32(ctx
->opcode
, 12, 2)) {
19018 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
19019 gen_store_gpr(t0
, ret
);
19022 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
19023 gen_store_gpr(t0
, ret
);
19026 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
19027 gen_store_gpr(t0
, ret
);
19030 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
19031 gen_store_gpr(t0
, ret
);
19036 generate_exception_end(ctx
, EXCP_RI
);
19042 tcg_temp_free(v0_t
);
19045 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
19046 TCGv v0
, TCGv v1
, int rd
)
19050 t0
= tcg_temp_new_i32();
19052 tcg_gen_movi_i32(t0
, rd
>> 3);
19055 case NM_POOL32AXF_2_0_7
:
19056 switch (extract32(ctx
->opcode
, 9, 3)) {
19059 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
19061 case NM_DPAQ_S_W_PH
:
19063 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19067 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
19069 case NM_DPSQ_S_W_PH
:
19071 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19074 generate_exception_end(ctx
, EXCP_RI
);
19078 case NM_POOL32AXF_2_8_15
:
19079 switch (extract32(ctx
->opcode
, 9, 3)) {
19082 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
19084 case NM_DPAQ_SA_L_W
:
19086 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19090 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
19092 case NM_DPSQ_SA_L_W
:
19094 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19097 generate_exception_end(ctx
, EXCP_RI
);
19101 case NM_POOL32AXF_2_16_23
:
19102 switch (extract32(ctx
->opcode
, 9, 3)) {
19103 case NM_DPAU_H_QBL
:
19105 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
19107 case NM_DPAQX_S_W_PH
:
19109 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19111 case NM_DPSU_H_QBL
:
19113 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
19115 case NM_DPSQX_S_W_PH
:
19117 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19119 case NM_MULSA_W_PH
:
19121 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
19124 generate_exception_end(ctx
, EXCP_RI
);
19128 case NM_POOL32AXF_2_24_31
:
19129 switch (extract32(ctx
->opcode
, 9, 3)) {
19130 case NM_DPAU_H_QBR
:
19132 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
19134 case NM_DPAQX_SA_W_PH
:
19136 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19138 case NM_DPSU_H_QBR
:
19140 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
19142 case NM_DPSQX_SA_W_PH
:
19144 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19146 case NM_MULSAQ_S_W_PH
:
19148 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19151 generate_exception_end(ctx
, EXCP_RI
);
19156 generate_exception_end(ctx
, EXCP_RI
);
19160 tcg_temp_free_i32(t0
);
19163 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19164 int rt
, int rs
, int rd
)
19167 TCGv t0
= tcg_temp_new();
19168 TCGv t1
= tcg_temp_new();
19169 TCGv v0_t
= tcg_temp_new();
19170 TCGv v1_t
= tcg_temp_new();
19172 gen_load_gpr(v0_t
, rt
);
19173 gen_load_gpr(v1_t
, rs
);
19176 case NM_POOL32AXF_2_0_7
:
19177 switch (extract32(ctx
->opcode
, 9, 3)) {
19179 case NM_DPAQ_S_W_PH
:
19181 case NM_DPSQ_S_W_PH
:
19182 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19187 gen_load_gpr(t0
, rs
);
19189 if (rd
!= 0 && rd
!= 2) {
19190 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19191 tcg_gen_ext32u_tl(t0
, t0
);
19192 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19193 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19195 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19201 int acc
= extract32(ctx
->opcode
, 14, 2);
19202 TCGv_i64 t2
= tcg_temp_new_i64();
19203 TCGv_i64 t3
= tcg_temp_new_i64();
19205 gen_load_gpr(t0
, rt
);
19206 gen_load_gpr(t1
, rs
);
19207 tcg_gen_ext_tl_i64(t2
, t0
);
19208 tcg_gen_ext_tl_i64(t3
, t1
);
19209 tcg_gen_mul_i64(t2
, t2
, t3
);
19210 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19211 tcg_gen_add_i64(t2
, t2
, t3
);
19212 tcg_temp_free_i64(t3
);
19213 gen_move_low32(cpu_LO
[acc
], t2
);
19214 gen_move_high32(cpu_HI
[acc
], t2
);
19215 tcg_temp_free_i64(t2
);
19221 int acc
= extract32(ctx
->opcode
, 14, 2);
19222 TCGv_i32 t2
= tcg_temp_new_i32();
19223 TCGv_i32 t3
= tcg_temp_new_i32();
19225 gen_load_gpr(t0
, rs
);
19226 gen_load_gpr(t1
, rt
);
19227 tcg_gen_trunc_tl_i32(t2
, t0
);
19228 tcg_gen_trunc_tl_i32(t3
, t1
);
19229 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19230 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19231 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19232 tcg_temp_free_i32(t2
);
19233 tcg_temp_free_i32(t3
);
19238 gen_load_gpr(v1_t
, rs
);
19239 tcg_gen_movi_tl(t0
, rd
>> 3);
19240 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19241 gen_store_gpr(t0
, ret
);
19245 case NM_POOL32AXF_2_8_15
:
19246 switch (extract32(ctx
->opcode
, 9, 3)) {
19248 case NM_DPAQ_SA_L_W
:
19250 case NM_DPSQ_SA_L_W
:
19251 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19256 int acc
= extract32(ctx
->opcode
, 14, 2);
19257 TCGv_i64 t2
= tcg_temp_new_i64();
19258 TCGv_i64 t3
= tcg_temp_new_i64();
19260 gen_load_gpr(t0
, rs
);
19261 gen_load_gpr(t1
, rt
);
19262 tcg_gen_ext32u_tl(t0
, t0
);
19263 tcg_gen_ext32u_tl(t1
, t1
);
19264 tcg_gen_extu_tl_i64(t2
, t0
);
19265 tcg_gen_extu_tl_i64(t3
, t1
);
19266 tcg_gen_mul_i64(t2
, t2
, t3
);
19267 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19268 tcg_gen_add_i64(t2
, t2
, t3
);
19269 tcg_temp_free_i64(t3
);
19270 gen_move_low32(cpu_LO
[acc
], t2
);
19271 gen_move_high32(cpu_HI
[acc
], t2
);
19272 tcg_temp_free_i64(t2
);
19278 int acc
= extract32(ctx
->opcode
, 14, 2);
19279 TCGv_i32 t2
= tcg_temp_new_i32();
19280 TCGv_i32 t3
= tcg_temp_new_i32();
19282 gen_load_gpr(t0
, rs
);
19283 gen_load_gpr(t1
, rt
);
19284 tcg_gen_trunc_tl_i32(t2
, t0
);
19285 tcg_gen_trunc_tl_i32(t3
, t1
);
19286 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19287 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19288 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19289 tcg_temp_free_i32(t2
);
19290 tcg_temp_free_i32(t3
);
19295 tcg_gen_movi_tl(t0
, rd
>> 3);
19296 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19297 gen_store_gpr(t0
, ret
);
19300 generate_exception_end(ctx
, EXCP_RI
);
19304 case NM_POOL32AXF_2_16_23
:
19305 switch (extract32(ctx
->opcode
, 9, 3)) {
19306 case NM_DPAU_H_QBL
:
19307 case NM_DPAQX_S_W_PH
:
19308 case NM_DPSU_H_QBL
:
19309 case NM_DPSQX_S_W_PH
:
19310 case NM_MULSA_W_PH
:
19311 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19315 tcg_gen_movi_tl(t0
, rd
>> 3);
19316 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19317 gen_store_gpr(t0
, ret
);
19322 int acc
= extract32(ctx
->opcode
, 14, 2);
19323 TCGv_i64 t2
= tcg_temp_new_i64();
19324 TCGv_i64 t3
= tcg_temp_new_i64();
19326 gen_load_gpr(t0
, rs
);
19327 gen_load_gpr(t1
, rt
);
19328 tcg_gen_ext_tl_i64(t2
, t0
);
19329 tcg_gen_ext_tl_i64(t3
, t1
);
19330 tcg_gen_mul_i64(t2
, t2
, t3
);
19331 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19332 tcg_gen_sub_i64(t2
, t3
, t2
);
19333 tcg_temp_free_i64(t3
);
19334 gen_move_low32(cpu_LO
[acc
], t2
);
19335 gen_move_high32(cpu_HI
[acc
], t2
);
19336 tcg_temp_free_i64(t2
);
19339 case NM_EXTRV_RS_W
:
19341 tcg_gen_movi_tl(t0
, rd
>> 3);
19342 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19343 gen_store_gpr(t0
, ret
);
19347 case NM_POOL32AXF_2_24_31
:
19348 switch (extract32(ctx
->opcode
, 9, 3)) {
19349 case NM_DPAU_H_QBR
:
19350 case NM_DPAQX_SA_W_PH
:
19351 case NM_DPSU_H_QBR
:
19352 case NM_DPSQX_SA_W_PH
:
19353 case NM_MULSAQ_S_W_PH
:
19354 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19358 tcg_gen_movi_tl(t0
, rd
>> 3);
19359 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19360 gen_store_gpr(t0
, ret
);
19365 int acc
= extract32(ctx
->opcode
, 14, 2);
19366 TCGv_i64 t2
= tcg_temp_new_i64();
19367 TCGv_i64 t3
= tcg_temp_new_i64();
19369 gen_load_gpr(t0
, rs
);
19370 gen_load_gpr(t1
, rt
);
19371 tcg_gen_ext32u_tl(t0
, t0
);
19372 tcg_gen_ext32u_tl(t1
, t1
);
19373 tcg_gen_extu_tl_i64(t2
, t0
);
19374 tcg_gen_extu_tl_i64(t3
, t1
);
19375 tcg_gen_mul_i64(t2
, t2
, t3
);
19376 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19377 tcg_gen_sub_i64(t2
, t3
, t2
);
19378 tcg_temp_free_i64(t3
);
19379 gen_move_low32(cpu_LO
[acc
], t2
);
19380 gen_move_high32(cpu_HI
[acc
], t2
);
19381 tcg_temp_free_i64(t2
);
19386 tcg_gen_movi_tl(t0
, rd
>> 3);
19387 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19388 gen_store_gpr(t0
, ret
);
19393 generate_exception_end(ctx
, EXCP_RI
);
19400 tcg_temp_free(v0_t
);
19401 tcg_temp_free(v1_t
);
19404 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19408 TCGv t0
= tcg_temp_new();
19409 TCGv v0_t
= tcg_temp_new();
19411 gen_load_gpr(v0_t
, rs
);
19416 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19417 gen_store_gpr(v0_t
, ret
);
19421 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19422 gen_store_gpr(v0_t
, ret
);
19426 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19427 gen_store_gpr(v0_t
, ret
);
19429 case NM_PRECEQ_W_PHL
:
19431 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19432 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19433 gen_store_gpr(v0_t
, ret
);
19435 case NM_PRECEQ_W_PHR
:
19437 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19438 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19439 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19440 gen_store_gpr(v0_t
, ret
);
19442 case NM_PRECEQU_PH_QBL
:
19444 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19445 gen_store_gpr(v0_t
, ret
);
19447 case NM_PRECEQU_PH_QBR
:
19449 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19450 gen_store_gpr(v0_t
, ret
);
19452 case NM_PRECEQU_PH_QBLA
:
19454 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19455 gen_store_gpr(v0_t
, ret
);
19457 case NM_PRECEQU_PH_QBRA
:
19459 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19460 gen_store_gpr(v0_t
, ret
);
19462 case NM_PRECEU_PH_QBL
:
19464 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19465 gen_store_gpr(v0_t
, ret
);
19467 case NM_PRECEU_PH_QBR
:
19469 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19470 gen_store_gpr(v0_t
, ret
);
19472 case NM_PRECEU_PH_QBLA
:
19474 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19475 gen_store_gpr(v0_t
, ret
);
19477 case NM_PRECEU_PH_QBRA
:
19479 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19480 gen_store_gpr(v0_t
, ret
);
19484 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19485 tcg_gen_shli_tl(t0
, v0_t
, 16);
19486 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19487 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19488 gen_store_gpr(v0_t
, ret
);
19492 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19493 tcg_gen_shli_tl(t0
, v0_t
, 8);
19494 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19495 tcg_gen_shli_tl(t0
, v0_t
, 16);
19496 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19497 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19498 gen_store_gpr(v0_t
, ret
);
19502 gen_helper_bitrev(v0_t
, v0_t
);
19503 gen_store_gpr(v0_t
, ret
);
19508 TCGv tv0
= tcg_temp_new();
19510 gen_load_gpr(tv0
, rt
);
19511 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19512 gen_store_gpr(v0_t
, ret
);
19513 tcg_temp_free(tv0
);
19516 case NM_RADDU_W_QB
:
19518 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19519 gen_store_gpr(v0_t
, ret
);
19522 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19526 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19530 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19533 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19536 generate_exception_end(ctx
, EXCP_RI
);
19540 tcg_temp_free(v0_t
);
19544 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19545 int rt
, int rs
, int rd
)
19547 TCGv t0
= tcg_temp_new();
19548 TCGv rs_t
= tcg_temp_new();
19550 gen_load_gpr(rs_t
, rs
);
19555 tcg_gen_movi_tl(t0
, rd
>> 2);
19556 switch (extract32(ctx
->opcode
, 12, 1)) {
19559 gen_helper_shra_qb(t0
, t0
, rs_t
);
19560 gen_store_gpr(t0
, rt
);
19564 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19565 gen_store_gpr(t0
, rt
);
19571 tcg_gen_movi_tl(t0
, rd
>> 1);
19572 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19573 gen_store_gpr(t0
, rt
);
19579 target_long result
;
19580 imm
= extract32(ctx
->opcode
, 13, 8);
19581 result
= (uint32_t)imm
<< 24 |
19582 (uint32_t)imm
<< 16 |
19583 (uint32_t)imm
<< 8 |
19585 result
= (int32_t)result
;
19586 tcg_gen_movi_tl(t0
, result
);
19587 gen_store_gpr(t0
, rt
);
19591 generate_exception_end(ctx
, EXCP_RI
);
19595 tcg_temp_free(rs_t
);
19599 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19601 int rt
= extract32(ctx
->opcode
, 21, 5);
19602 int rs
= extract32(ctx
->opcode
, 16, 5);
19603 int rd
= extract32(ctx
->opcode
, 11, 5);
19605 switch (extract32(ctx
->opcode
, 6, 3)) {
19606 case NM_POOL32AXF_1
:
19608 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19609 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19612 case NM_POOL32AXF_2
:
19614 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19615 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19618 case NM_POOL32AXF_4
:
19620 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19621 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19624 case NM_POOL32AXF_5
:
19625 switch (extract32(ctx
->opcode
, 9, 7)) {
19626 #ifndef CONFIG_USER_ONLY
19628 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19631 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19634 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19637 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19640 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19643 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19646 check_cp0_enabled(ctx
);
19648 TCGv t0
= tcg_temp_new();
19650 save_cpu_state(ctx
, 1);
19651 gen_helper_di(t0
, cpu_env
);
19652 gen_store_gpr(t0
, rt
);
19653 /* Stop translation as we may have switched the execution mode */
19654 ctx
->base
.is_jmp
= DISAS_STOP
;
19659 check_cp0_enabled(ctx
);
19661 TCGv t0
= tcg_temp_new();
19663 save_cpu_state(ctx
, 1);
19664 gen_helper_ei(t0
, cpu_env
);
19665 gen_store_gpr(t0
, rt
);
19666 /* Stop translation as we may have switched the execution mode */
19667 ctx
->base
.is_jmp
= DISAS_STOP
;
19672 gen_load_srsgpr(rs
, rt
);
19675 gen_store_srsgpr(rs
, rt
);
19678 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19681 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19684 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19688 generate_exception_end(ctx
, EXCP_RI
);
19692 case NM_POOL32AXF_7
:
19694 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19695 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19699 generate_exception_end(ctx
, EXCP_RI
);
19704 /* Immediate Value Compact Branches */
19705 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19706 int rt
, int32_t imm
, int32_t offset
)
19709 int bcond_compute
= 0;
19710 TCGv t0
= tcg_temp_new();
19711 TCGv t1
= tcg_temp_new();
19713 gen_load_gpr(t0
, rt
);
19714 tcg_gen_movi_tl(t1
, imm
);
19715 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19717 /* Load needed operands and calculate btarget */
19720 if (rt
== 0 && imm
== 0) {
19721 /* Unconditional branch */
19722 } else if (rt
== 0 && imm
!= 0) {
19727 cond
= TCG_COND_EQ
;
19733 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19734 generate_exception_end(ctx
, EXCP_RI
);
19736 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19737 /* Unconditional branch */
19738 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19742 tcg_gen_shri_tl(t0
, t0
, imm
);
19743 tcg_gen_andi_tl(t0
, t0
, 1);
19744 tcg_gen_movi_tl(t1
, 0);
19746 if (opc
== NM_BBEQZC
) {
19747 cond
= TCG_COND_EQ
;
19749 cond
= TCG_COND_NE
;
19754 if (rt
== 0 && imm
== 0) {
19757 } else if (rt
== 0 && imm
!= 0) {
19758 /* Unconditional branch */
19761 cond
= TCG_COND_NE
;
19765 if (rt
== 0 && imm
== 0) {
19766 /* Unconditional branch */
19769 cond
= TCG_COND_GE
;
19774 cond
= TCG_COND_LT
;
19777 if (rt
== 0 && imm
== 0) {
19778 /* Unconditional branch */
19781 cond
= TCG_COND_GEU
;
19786 cond
= TCG_COND_LTU
;
19789 MIPS_INVAL("Immediate Value Compact branch");
19790 generate_exception_end(ctx
, EXCP_RI
);
19794 /* branch completion */
19795 clear_branch_hflags(ctx
);
19796 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19798 if (bcond_compute
== 0) {
19799 /* Uncoditional compact branch */
19800 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19802 /* Conditional compact branch */
19803 TCGLabel
*fs
= gen_new_label();
19805 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19807 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19810 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19818 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19819 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19822 TCGv t0
= tcg_temp_new();
19823 TCGv t1
= tcg_temp_new();
19826 gen_load_gpr(t0
, rs
);
19830 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19833 /* calculate btarget */
19834 tcg_gen_shli_tl(t0
, t0
, 1);
19835 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19836 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19838 /* branch completion */
19839 clear_branch_hflags(ctx
);
19840 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19842 /* unconditional branch to register */
19843 tcg_gen_mov_tl(cpu_PC
, btarget
);
19844 tcg_gen_lookup_and_goto_ptr();
19850 /* nanoMIPS Branches */
19851 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19852 int rs
, int rt
, int32_t offset
)
19854 int bcond_compute
= 0;
19855 TCGv t0
= tcg_temp_new();
19856 TCGv t1
= tcg_temp_new();
19858 /* Load needed operands and calculate btarget */
19860 /* compact branch */
19863 gen_load_gpr(t0
, rs
);
19864 gen_load_gpr(t1
, rt
);
19866 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19870 if (rs
== 0 || rs
== rt
) {
19871 /* OPC_BLEZALC, OPC_BGEZALC */
19872 /* OPC_BGTZALC, OPC_BLTZALC */
19873 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19875 gen_load_gpr(t0
, rs
);
19876 gen_load_gpr(t1
, rt
);
19878 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19881 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19885 /* OPC_BEQZC, OPC_BNEZC */
19886 gen_load_gpr(t0
, rs
);
19888 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19890 /* OPC_JIC, OPC_JIALC */
19891 TCGv tbase
= tcg_temp_new();
19892 TCGv toffset
= tcg_temp_new();
19894 gen_load_gpr(tbase
, rt
);
19895 tcg_gen_movi_tl(toffset
, offset
);
19896 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19897 tcg_temp_free(tbase
);
19898 tcg_temp_free(toffset
);
19902 MIPS_INVAL("Compact branch/jump");
19903 generate_exception_end(ctx
, EXCP_RI
);
19907 if (bcond_compute
== 0) {
19908 /* Uncoditional compact branch */
19911 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19914 MIPS_INVAL("Compact branch/jump");
19915 generate_exception_end(ctx
, EXCP_RI
);
19919 /* Conditional compact branch */
19920 TCGLabel
*fs
= gen_new_label();
19924 if (rs
== 0 && rt
!= 0) {
19926 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19927 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19929 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19932 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
19936 if (rs
== 0 && rt
!= 0) {
19938 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19939 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19941 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19944 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
19948 if (rs
== 0 && rt
!= 0) {
19950 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
19951 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19953 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
19956 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
19960 if (rs
== 0 && rt
!= 0) {
19962 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
19963 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
19965 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
19968 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
19972 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
19975 MIPS_INVAL("Compact conditional branch/jump");
19976 generate_exception_end(ctx
, EXCP_RI
);
19980 /* branch completion */
19981 clear_branch_hflags(ctx
);
19982 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19984 /* Generating branch here as compact branches don't have delay slot */
19985 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19988 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19997 /* nanoMIPS CP1 Branches */
19998 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
19999 int32_t ft
, int32_t offset
)
20001 target_ulong btarget
;
20002 TCGv_i64 t0
= tcg_temp_new_i64();
20004 gen_load_fpr64(ctx
, t0
, ft
);
20005 tcg_gen_andi_i64(t0
, t0
, 1);
20007 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20011 tcg_gen_xori_i64(t0
, t0
, 1);
20012 ctx
->hflags
|= MIPS_HFLAG_BC
;
20015 /* t0 already set */
20016 ctx
->hflags
|= MIPS_HFLAG_BC
;
20019 MIPS_INVAL("cp1 cond branch");
20020 generate_exception_end(ctx
, EXCP_RI
);
20024 tcg_gen_trunc_i64_tl(bcond
, t0
);
20026 ctx
->btarget
= btarget
;
20029 tcg_temp_free_i64(t0
);
20033 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
20036 t0
= tcg_temp_new();
20037 t1
= tcg_temp_new();
20039 gen_load_gpr(t0
, rs
);
20040 gen_load_gpr(t1
, rt
);
20042 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
20043 /* PP.LSXS instructions require shifting */
20044 switch (extract32(ctx
->opcode
, 7, 4)) {
20049 tcg_gen_shli_tl(t0
, t0
, 1);
20056 tcg_gen_shli_tl(t0
, t0
, 2);
20060 tcg_gen_shli_tl(t0
, t0
, 3);
20064 gen_op_addr_add(ctx
, t0
, t0
, t1
);
20066 switch (extract32(ctx
->opcode
, 7, 4)) {
20068 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20070 gen_store_gpr(t0
, rd
);
20074 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20076 gen_store_gpr(t0
, rd
);
20080 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20082 gen_store_gpr(t0
, rd
);
20085 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20087 gen_store_gpr(t0
, rd
);
20091 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20093 gen_store_gpr(t0
, rd
);
20097 gen_load_gpr(t1
, rd
);
20098 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20104 gen_load_gpr(t1
, rd
);
20105 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20111 gen_load_gpr(t1
, rd
);
20112 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20116 /*case NM_LWC1XS:*/
20118 /*case NM_LDC1XS:*/
20120 /*case NM_SWC1XS:*/
20122 /*case NM_SDC1XS:*/
20123 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20124 check_cp1_enabled(ctx
);
20125 switch (extract32(ctx
->opcode
, 7, 4)) {
20127 /*case NM_LWC1XS:*/
20128 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
20131 /*case NM_LDC1XS:*/
20132 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
20135 /*case NM_SWC1XS:*/
20136 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
20139 /*case NM_SDC1XS:*/
20140 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
20144 generate_exception_err(ctx
, EXCP_CpU
, 1);
20148 generate_exception_end(ctx
, EXCP_RI
);
20156 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20160 rt
= extract32(ctx
->opcode
, 21, 5);
20161 rs
= extract32(ctx
->opcode
, 16, 5);
20162 rd
= extract32(ctx
->opcode
, 11, 5);
20164 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20165 generate_exception_end(ctx
, EXCP_RI
);
20168 check_cp1_enabled(ctx
);
20169 switch (extract32(ctx
->opcode
, 0, 3)) {
20171 switch (extract32(ctx
->opcode
, 3, 7)) {
20173 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20176 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20179 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20182 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20185 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20188 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20191 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20194 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20197 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20200 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20203 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20206 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20209 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20212 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20215 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20218 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20221 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20224 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20227 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20230 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20233 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20236 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20239 generate_exception_end(ctx
, EXCP_RI
);
20244 switch (extract32(ctx
->opcode
, 3, 3)) {
20246 switch (extract32(ctx
->opcode
, 9, 1)) {
20248 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20251 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20256 switch (extract32(ctx
->opcode
, 9, 1)) {
20258 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20261 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20266 switch (extract32(ctx
->opcode
, 9, 1)) {
20268 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20271 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20276 switch (extract32(ctx
->opcode
, 9, 1)) {
20278 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20281 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
20286 switch (extract32(ctx
->opcode
, 6, 8)) {
20288 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20291 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20294 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20297 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20300 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20303 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20306 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20309 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
20312 switch (extract32(ctx
->opcode
, 6, 9)) {
20314 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20317 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20320 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20323 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20326 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20329 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20332 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20335 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20338 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20341 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20344 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20347 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20350 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20353 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20356 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20359 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20362 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20365 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20368 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20371 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20374 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20377 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20380 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20383 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20386 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20389 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20392 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20395 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20398 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20401 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20404 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20407 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20410 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20413 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20416 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20419 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20422 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20425 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20428 generate_exception_end(ctx
, EXCP_RI
);
20437 switch (extract32(ctx
->opcode
, 3, 3)) {
20438 case NM_CMP_CONDN_S
:
20439 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20441 case NM_CMP_CONDN_D
:
20442 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20445 generate_exception_end(ctx
, EXCP_RI
);
20450 generate_exception_end(ctx
, EXCP_RI
);
20455 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20456 int rd
, int rs
, int rt
)
20459 TCGv t0
= tcg_temp_new();
20460 TCGv v1_t
= tcg_temp_new();
20461 TCGv v2_t
= tcg_temp_new();
20463 gen_load_gpr(v1_t
, rs
);
20464 gen_load_gpr(v2_t
, rt
);
20469 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20473 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20477 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20479 case NM_CMPU_EQ_QB
:
20481 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20483 case NM_CMPU_LT_QB
:
20485 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20487 case NM_CMPU_LE_QB
:
20489 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20491 case NM_CMPGU_EQ_QB
:
20493 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20494 gen_store_gpr(v1_t
, ret
);
20496 case NM_CMPGU_LT_QB
:
20498 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20499 gen_store_gpr(v1_t
, ret
);
20501 case NM_CMPGU_LE_QB
:
20503 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20504 gen_store_gpr(v1_t
, ret
);
20506 case NM_CMPGDU_EQ_QB
:
20508 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20509 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20510 gen_store_gpr(v1_t
, ret
);
20512 case NM_CMPGDU_LT_QB
:
20514 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20515 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20516 gen_store_gpr(v1_t
, ret
);
20518 case NM_CMPGDU_LE_QB
:
20520 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20521 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20522 gen_store_gpr(v1_t
, ret
);
20526 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20527 gen_store_gpr(v1_t
, ret
);
20531 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20532 gen_store_gpr(v1_t
, ret
);
20536 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20537 gen_store_gpr(v1_t
, ret
);
20541 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20542 gen_store_gpr(v1_t
, ret
);
20546 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20547 gen_store_gpr(v1_t
, ret
);
20551 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20552 gen_store_gpr(v1_t
, ret
);
20556 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20557 gen_store_gpr(v1_t
, ret
);
20561 switch (extract32(ctx
->opcode
, 10, 1)) {
20564 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20565 gen_store_gpr(v1_t
, ret
);
20569 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20570 gen_store_gpr(v1_t
, ret
);
20574 case NM_ADDQH_R_PH
:
20576 switch (extract32(ctx
->opcode
, 10, 1)) {
20579 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20580 gen_store_gpr(v1_t
, ret
);
20584 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20585 gen_store_gpr(v1_t
, ret
);
20591 switch (extract32(ctx
->opcode
, 10, 1)) {
20594 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20595 gen_store_gpr(v1_t
, ret
);
20599 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20600 gen_store_gpr(v1_t
, ret
);
20606 switch (extract32(ctx
->opcode
, 10, 1)) {
20609 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20610 gen_store_gpr(v1_t
, ret
);
20614 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20615 gen_store_gpr(v1_t
, ret
);
20621 switch (extract32(ctx
->opcode
, 10, 1)) {
20624 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20625 gen_store_gpr(v1_t
, ret
);
20629 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20630 gen_store_gpr(v1_t
, ret
);
20634 case NM_ADDUH_R_QB
:
20636 switch (extract32(ctx
->opcode
, 10, 1)) {
20639 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20640 gen_store_gpr(v1_t
, ret
);
20644 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20645 gen_store_gpr(v1_t
, ret
);
20649 case NM_SHRAV_R_PH
:
20651 switch (extract32(ctx
->opcode
, 10, 1)) {
20654 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20655 gen_store_gpr(v1_t
, ret
);
20659 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20660 gen_store_gpr(v1_t
, ret
);
20664 case NM_SHRAV_R_QB
:
20666 switch (extract32(ctx
->opcode
, 10, 1)) {
20669 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20670 gen_store_gpr(v1_t
, ret
);
20674 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20675 gen_store_gpr(v1_t
, ret
);
20681 switch (extract32(ctx
->opcode
, 10, 1)) {
20684 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20685 gen_store_gpr(v1_t
, ret
);
20689 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20690 gen_store_gpr(v1_t
, ret
);
20694 case NM_SUBQH_R_PH
:
20696 switch (extract32(ctx
->opcode
, 10, 1)) {
20699 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20700 gen_store_gpr(v1_t
, ret
);
20704 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20705 gen_store_gpr(v1_t
, ret
);
20711 switch (extract32(ctx
->opcode
, 10, 1)) {
20714 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20715 gen_store_gpr(v1_t
, ret
);
20719 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20720 gen_store_gpr(v1_t
, ret
);
20726 switch (extract32(ctx
->opcode
, 10, 1)) {
20729 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20730 gen_store_gpr(v1_t
, ret
);
20734 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20735 gen_store_gpr(v1_t
, ret
);
20741 switch (extract32(ctx
->opcode
, 10, 1)) {
20744 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20745 gen_store_gpr(v1_t
, ret
);
20749 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20750 gen_store_gpr(v1_t
, ret
);
20754 case NM_SUBUH_R_QB
:
20756 switch (extract32(ctx
->opcode
, 10, 1)) {
20759 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20760 gen_store_gpr(v1_t
, ret
);
20764 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20765 gen_store_gpr(v1_t
, ret
);
20769 case NM_SHLLV_S_PH
:
20771 switch (extract32(ctx
->opcode
, 10, 1)) {
20774 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20775 gen_store_gpr(v1_t
, ret
);
20779 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20780 gen_store_gpr(v1_t
, ret
);
20784 case NM_PRECR_SRA_R_PH_W
:
20786 switch (extract32(ctx
->opcode
, 10, 1)) {
20788 /* PRECR_SRA_PH_W */
20790 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20791 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20793 gen_store_gpr(v1_t
, rt
);
20794 tcg_temp_free_i32(sa_t
);
20798 /* PRECR_SRA_R_PH_W */
20800 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20801 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20803 gen_store_gpr(v1_t
, rt
);
20804 tcg_temp_free_i32(sa_t
);
20809 case NM_MULEU_S_PH_QBL
:
20811 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20812 gen_store_gpr(v1_t
, ret
);
20814 case NM_MULEU_S_PH_QBR
:
20816 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20817 gen_store_gpr(v1_t
, ret
);
20819 case NM_MULQ_RS_PH
:
20821 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20822 gen_store_gpr(v1_t
, ret
);
20826 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20827 gen_store_gpr(v1_t
, ret
);
20831 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20832 gen_store_gpr(v1_t
, ret
);
20836 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20837 gen_store_gpr(v1_t
, ret
);
20841 gen_load_gpr(t0
, rs
);
20843 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20845 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20849 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20850 gen_store_gpr(v1_t
, ret
);
20854 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20855 gen_store_gpr(v1_t
, ret
);
20859 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20860 gen_store_gpr(v1_t
, ret
);
20864 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20865 gen_store_gpr(v1_t
, ret
);
20869 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20870 gen_store_gpr(v1_t
, ret
);
20874 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20875 gen_store_gpr(v1_t
, ret
);
20880 TCGv tv0
= tcg_temp_new();
20881 TCGv tv1
= tcg_temp_new();
20882 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20884 tcg_gen_movi_tl(tv0
, rd
>> 3);
20885 tcg_gen_movi_tl(tv1
, imm
);
20886 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20889 case NM_MULEQ_S_W_PHL
:
20891 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20892 gen_store_gpr(v1_t
, ret
);
20894 case NM_MULEQ_S_W_PHR
:
20896 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20897 gen_store_gpr(v1_t
, ret
);
20901 switch (extract32(ctx
->opcode
, 10, 1)) {
20904 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20905 gen_store_gpr(v1_t
, ret
);
20909 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20910 gen_store_gpr(v1_t
, ret
);
20914 case NM_PRECR_QB_PH
:
20916 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
20917 gen_store_gpr(v1_t
, ret
);
20919 case NM_PRECRQ_QB_PH
:
20921 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
20922 gen_store_gpr(v1_t
, ret
);
20924 case NM_PRECRQ_PH_W
:
20926 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
20927 gen_store_gpr(v1_t
, ret
);
20929 case NM_PRECRQ_RS_PH_W
:
20931 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20932 gen_store_gpr(v1_t
, ret
);
20934 case NM_PRECRQU_S_QB_PH
:
20936 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20937 gen_store_gpr(v1_t
, ret
);
20941 tcg_gen_movi_tl(t0
, rd
);
20942 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
20943 gen_store_gpr(v1_t
, rt
);
20947 tcg_gen_movi_tl(t0
, rd
>> 1);
20948 switch (extract32(ctx
->opcode
, 10, 1)) {
20951 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
20952 gen_store_gpr(v1_t
, rt
);
20956 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
20957 gen_store_gpr(v1_t
, rt
);
20963 tcg_gen_movi_tl(t0
, rd
>> 1);
20964 switch (extract32(ctx
->opcode
, 10, 2)) {
20967 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
20968 gen_store_gpr(v1_t
, rt
);
20972 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
20973 gen_store_gpr(v1_t
, rt
);
20976 generate_exception_end(ctx
, EXCP_RI
);
20982 tcg_gen_movi_tl(t0
, rd
);
20983 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
20984 gen_store_gpr(v1_t
, rt
);
20990 imm
= sextract32(ctx
->opcode
, 11, 11);
20991 imm
= (int16_t)(imm
<< 6) >> 6;
20993 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
20998 generate_exception_end(ctx
, EXCP_RI
);
21003 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21011 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
21012 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
21014 rt
= extract32(ctx
->opcode
, 21, 5);
21015 rs
= extract32(ctx
->opcode
, 16, 5);
21016 rd
= extract32(ctx
->opcode
, 11, 5);
21018 op
= extract32(ctx
->opcode
, 26, 6);
21023 switch (extract32(ctx
->opcode
, 19, 2)) {
21026 generate_exception_end(ctx
, EXCP_RI
);
21029 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
21030 generate_exception_end(ctx
, EXCP_SYSCALL
);
21032 generate_exception_end(ctx
, EXCP_RI
);
21036 generate_exception_end(ctx
, EXCP_BREAK
);
21039 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
21040 gen_helper_do_semihosting(cpu_env
);
21042 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21043 generate_exception_end(ctx
, EXCP_RI
);
21045 generate_exception_end(ctx
, EXCP_DBp
);
21052 imm
= extract32(ctx
->opcode
, 0, 16);
21054 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
21056 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21058 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21063 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21064 extract32(ctx
->opcode
, 1, 20) << 1;
21065 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21066 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21070 switch (ctx
->opcode
& 0x07) {
21072 gen_pool32a0_nanomips_insn(env
, ctx
);
21076 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
21077 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
21081 switch (extract32(ctx
->opcode
, 3, 3)) {
21083 gen_p_lsx(ctx
, rd
, rs
, rt
);
21086 /* In nanoMIPS, the shift field directly encodes the shift
21087 * amount, meaning that the supported shift values are in
21088 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
21089 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
21090 extract32(ctx
->opcode
, 9, 2) - 1);
21093 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
21096 gen_pool32axf_nanomips_insn(env
, ctx
);
21099 generate_exception_end(ctx
, EXCP_RI
);
21104 generate_exception_end(ctx
, EXCP_RI
);
21109 switch (ctx
->opcode
& 0x03) {
21112 offset
= extract32(ctx
->opcode
, 0, 21);
21113 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
21117 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21120 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21123 generate_exception_end(ctx
, EXCP_RI
);
21129 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
21130 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
21131 switch (extract32(ctx
->opcode
, 16, 5)) {
21135 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
21141 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
21142 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21148 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
21154 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21157 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21164 t0
= tcg_temp_new();
21166 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21169 tcg_gen_movi_tl(t0
, addr
);
21170 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21178 t0
= tcg_temp_new();
21179 t1
= tcg_temp_new();
21181 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21184 tcg_gen_movi_tl(t0
, addr
);
21185 gen_load_gpr(t1
, rt
);
21187 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21194 generate_exception_end(ctx
, EXCP_RI
);
21200 switch (extract32(ctx
->opcode
, 12, 4)) {
21202 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21205 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21208 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21211 switch (extract32(ctx
->opcode
, 20, 1)) {
21213 switch (ctx
->opcode
& 3) {
21215 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21216 extract32(ctx
->opcode
, 2, 1),
21217 extract32(ctx
->opcode
, 3, 9) << 3);
21220 case NM_RESTORE_JRC
:
21221 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21222 extract32(ctx
->opcode
, 2, 1),
21223 extract32(ctx
->opcode
, 3, 9) << 3);
21224 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21225 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21229 generate_exception_end(ctx
, EXCP_RI
);
21234 generate_exception_end(ctx
, EXCP_RI
);
21239 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21242 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21246 TCGv t0
= tcg_temp_new();
21248 imm
= extract32(ctx
->opcode
, 0, 12);
21249 gen_load_gpr(t0
, rs
);
21250 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21251 gen_store_gpr(t0
, rt
);
21257 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21258 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21262 int shift
= extract32(ctx
->opcode
, 0, 5);
21263 switch (extract32(ctx
->opcode
, 5, 4)) {
21265 if (rt
== 0 && shift
== 0) {
21267 } else if (rt
== 0 && shift
== 3) {
21268 /* EHB - treat as NOP */
21269 } else if (rt
== 0 && shift
== 5) {
21270 /* PAUSE - treat as NOP */
21271 } else if (rt
== 0 && shift
== 6) {
21273 gen_sync(extract32(ctx
->opcode
, 16, 5));
21276 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21277 extract32(ctx
->opcode
, 0, 5));
21281 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21282 extract32(ctx
->opcode
, 0, 5));
21285 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21286 extract32(ctx
->opcode
, 0, 5));
21289 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21290 extract32(ctx
->opcode
, 0, 5));
21298 TCGv t0
= tcg_temp_new();
21299 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21300 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21302 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21304 gen_load_gpr(t0
, rs
);
21305 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21308 tcg_temp_free_i32(shift
);
21309 tcg_temp_free_i32(shiftx
);
21310 tcg_temp_free_i32(stripe
);
21314 switch (((ctx
->opcode
>> 10) & 2) |
21315 (extract32(ctx
->opcode
, 5, 1))) {
21318 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21319 extract32(ctx
->opcode
, 6, 5));
21322 generate_exception_end(ctx
, EXCP_RI
);
21327 switch (((ctx
->opcode
>> 10) & 2) |
21328 (extract32(ctx
->opcode
, 5, 1))) {
21331 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21332 extract32(ctx
->opcode
, 6, 5));
21335 generate_exception_end(ctx
, EXCP_RI
);
21340 generate_exception_end(ctx
, EXCP_RI
);
21345 gen_pool32f_nanomips_insn(ctx
);
21350 switch (extract32(ctx
->opcode
, 1, 1)) {
21353 tcg_gen_movi_tl(cpu_gpr
[rt
],
21354 sextract32(ctx
->opcode
, 0, 1) << 31 |
21355 extract32(ctx
->opcode
, 2, 10) << 21 |
21356 extract32(ctx
->opcode
, 12, 9) << 12);
21361 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21362 extract32(ctx
->opcode
, 2, 10) << 21 |
21363 extract32(ctx
->opcode
, 12, 9) << 12;
21365 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21366 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21373 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21375 switch (extract32(ctx
->opcode
, 18, 3)) {
21377 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21380 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21383 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21387 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21392 switch (ctx
->opcode
& 1) {
21394 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21397 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21403 switch (ctx
->opcode
& 1) {
21405 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21408 generate_exception_end(ctx
, EXCP_RI
);
21414 switch (ctx
->opcode
& 0x3) {
21416 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21419 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21422 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21425 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21430 generate_exception_end(ctx
, EXCP_RI
);
21437 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21439 switch (extract32(ctx
->opcode
, 12, 4)) {
21443 /* Break the TB to be able to sync copied instructions
21445 ctx
->base
.is_jmp
= DISAS_STOP
;
21448 /* Treat as NOP. */
21452 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21455 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21458 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21461 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21464 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21467 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21470 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21473 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21476 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21479 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21482 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21485 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21488 generate_exception_end(ctx
, EXCP_RI
);
21495 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21496 extract32(ctx
->opcode
, 0, 8);
21498 switch (extract32(ctx
->opcode
, 8, 3)) {
21500 switch (extract32(ctx
->opcode
, 11, 4)) {
21502 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21505 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21508 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21511 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21514 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21517 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21520 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21523 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21526 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21529 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21532 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21535 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21540 /* Break the TB to be able to sync copied instructions
21542 ctx
->base
.is_jmp
= DISAS_STOP
;
21545 /* Treat as NOP. */
21549 generate_exception_end(ctx
, EXCP_RI
);
21554 switch (extract32(ctx
->opcode
, 11, 4)) {
21559 TCGv t0
= tcg_temp_new();
21560 TCGv t1
= tcg_temp_new();
21562 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21564 switch (extract32(ctx
->opcode
, 11, 4)) {
21566 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21568 gen_store_gpr(t0
, rt
);
21571 gen_load_gpr(t1
, rt
);
21572 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21581 switch (ctx
->opcode
& 0x03) {
21583 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21587 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21592 switch (ctx
->opcode
& 0x03) {
21594 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, false);
21598 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21604 check_cp0_enabled(ctx
);
21605 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21606 gen_cache_operation(ctx
, rt
, rs
, s
);
21612 switch (extract32(ctx
->opcode
, 11, 4)) {
21615 check_cp0_enabled(ctx
);
21616 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21620 check_cp0_enabled(ctx
);
21621 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21625 check_cp0_enabled(ctx
);
21626 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21630 /* case NM_SYNCIE */
21632 check_cp0_enabled(ctx
);
21633 /* Break the TB to be able to sync copied instructions
21635 ctx
->base
.is_jmp
= DISAS_STOP
;
21637 /* case NM_PREFE */
21639 check_cp0_enabled(ctx
);
21640 /* Treat as NOP. */
21645 check_cp0_enabled(ctx
);
21646 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21650 check_cp0_enabled(ctx
);
21651 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21655 check_cp0_enabled(ctx
);
21656 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21659 check_nms_dl_il_sl_tl_l2c(ctx
);
21660 gen_cache_operation(ctx
, rt
, rs
, s
);
21664 check_cp0_enabled(ctx
);
21665 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21669 check_cp0_enabled(ctx
);
21670 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21673 switch (extract32(ctx
->opcode
, 2, 2)) {
21677 check_cp0_enabled(ctx
);
21678 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21683 check_cp0_enabled(ctx
);
21684 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21687 generate_exception_end(ctx
, EXCP_RI
);
21692 switch (extract32(ctx
->opcode
, 2, 2)) {
21696 check_cp0_enabled(ctx
);
21697 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, true);
21702 check_cp0_enabled(ctx
);
21703 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21707 generate_exception_end(ctx
, EXCP_RI
);
21717 int count
= extract32(ctx
->opcode
, 12, 3);
21720 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21721 extract32(ctx
->opcode
, 0, 8);
21722 TCGv va
= tcg_temp_new();
21723 TCGv t1
= tcg_temp_new();
21724 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21725 NM_P_LS_UAWM
? MO_UNALN
: 0;
21727 count
= (count
== 0) ? 8 : count
;
21728 while (counter
!= count
) {
21729 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21730 int this_offset
= offset
+ (counter
<< 2);
21732 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21734 switch (extract32(ctx
->opcode
, 11, 1)) {
21736 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21738 gen_store_gpr(t1
, this_rt
);
21739 if ((this_rt
== rs
) &&
21740 (counter
!= (count
- 1))) {
21741 /* UNPREDICTABLE */
21745 this_rt
= (rt
== 0) ? 0 : this_rt
;
21746 gen_load_gpr(t1
, this_rt
);
21747 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21758 generate_exception_end(ctx
, EXCP_RI
);
21766 TCGv t0
= tcg_temp_new();
21767 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21768 extract32(ctx
->opcode
, 1, 20) << 1;
21769 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21770 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21771 extract32(ctx
->opcode
, 21, 3));
21772 gen_load_gpr(t0
, rt
);
21773 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21774 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21780 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21781 extract32(ctx
->opcode
, 1, 24) << 1;
21783 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21785 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21788 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21793 switch (extract32(ctx
->opcode
, 12, 4)) {
21796 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21799 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21802 generate_exception_end(ctx
, EXCP_RI
);
21808 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21809 extract32(ctx
->opcode
, 1, 13) << 1;
21810 switch (extract32(ctx
->opcode
, 14, 2)) {
21813 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21816 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21817 extract32(ctx
->opcode
, 1, 13) << 1;
21818 check_cp1_enabled(ctx
);
21819 switch (extract32(ctx
->opcode
, 16, 5)) {
21821 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21824 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21829 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21830 extract32(ctx
->opcode
, 0, 1) << 13;
21832 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21837 generate_exception_end(ctx
, EXCP_RI
);
21843 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21845 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21849 if (rs
== rt
|| rt
== 0) {
21850 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21851 } else if (rs
== 0) {
21852 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21854 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21862 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21863 extract32(ctx
->opcode
, 1, 13) << 1;
21864 switch (extract32(ctx
->opcode
, 14, 2)) {
21867 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21870 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21872 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21874 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21878 if (rs
== 0 || rs
== rt
) {
21880 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21882 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21886 generate_exception_end(ctx
, EXCP_RI
);
21893 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21894 extract32(ctx
->opcode
, 1, 10) << 1;
21895 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21897 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
21902 generate_exception_end(ctx
, EXCP_RI
);
21908 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21911 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
21912 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
21913 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD3(ctx
->opcode
));
21917 /* make sure instructions are on a halfword boundary */
21918 if (ctx
->base
.pc_next
& 0x1) {
21919 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
21920 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
21921 tcg_temp_free(tmp
);
21922 generate_exception_end(ctx
, EXCP_AdEL
);
21926 op
= extract32(ctx
->opcode
, 10, 6);
21929 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
21932 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
21933 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
21936 switch (extract32(ctx
->opcode
, 3, 2)) {
21937 case NM_P16_SYSCALL
:
21938 if (extract32(ctx
->opcode
, 2, 1) == 0) {
21939 generate_exception_end(ctx
, EXCP_SYSCALL
);
21941 generate_exception_end(ctx
, EXCP_RI
);
21945 generate_exception_end(ctx
, EXCP_BREAK
);
21948 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
21949 gen_helper_do_semihosting(cpu_env
);
21951 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21952 generate_exception_end(ctx
, EXCP_RI
);
21954 generate_exception_end(ctx
, EXCP_DBp
);
21959 generate_exception_end(ctx
, EXCP_RI
);
21966 int shift
= extract32(ctx
->opcode
, 0, 3);
21968 shift
= (shift
== 0) ? 8 : shift
;
21970 switch (extract32(ctx
->opcode
, 3, 1)) {
21978 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
21982 switch (ctx
->opcode
& 1) {
21984 gen_pool16c_nanomips_insn(ctx
);
21987 gen_ldxs(ctx
, rt
, rs
, rd
);
21992 switch (extract32(ctx
->opcode
, 6, 1)) {
21994 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
21995 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
21998 generate_exception_end(ctx
, EXCP_RI
);
22003 switch (extract32(ctx
->opcode
, 3, 1)) {
22005 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
22006 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
22008 case NM_P_ADDIURS5
:
22009 rt
= extract32(ctx
->opcode
, 5, 5);
22011 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
22012 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
22013 (extract32(ctx
->opcode
, 0, 3));
22014 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
22020 switch (ctx
->opcode
& 0x1) {
22022 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
22025 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
22030 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22031 extract32(ctx
->opcode
, 5, 3);
22032 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22033 extract32(ctx
->opcode
, 0, 3);
22034 rt
= decode_gpr_gpr4(rt
);
22035 rs
= decode_gpr_gpr4(rs
);
22036 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
22037 (extract32(ctx
->opcode
, 3, 1))) {
22040 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
22044 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
22047 generate_exception_end(ctx
, EXCP_RI
);
22053 int imm
= extract32(ctx
->opcode
, 0, 7);
22054 imm
= (imm
== 0x7f ? -1 : imm
);
22056 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
22062 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
22063 u
= (u
== 12) ? 0xff :
22064 (u
== 13) ? 0xffff : u
;
22065 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
22069 offset
= extract32(ctx
->opcode
, 0, 2);
22070 switch (extract32(ctx
->opcode
, 2, 2)) {
22072 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
22075 rt
= decode_gpr_gpr3_src_store(
22076 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22077 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
22080 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
22083 generate_exception_end(ctx
, EXCP_RI
);
22088 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
22089 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
22091 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
22094 rt
= decode_gpr_gpr3_src_store(
22095 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22096 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
22099 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
22102 generate_exception_end(ctx
, EXCP_RI
);
22107 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22108 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22111 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22112 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22113 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
22117 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22118 extract32(ctx
->opcode
, 5, 3);
22119 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22120 extract32(ctx
->opcode
, 0, 3);
22121 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22122 (extract32(ctx
->opcode
, 8, 1) << 2);
22123 rt
= decode_gpr_gpr4(rt
);
22124 rs
= decode_gpr_gpr4(rs
);
22125 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22129 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22130 extract32(ctx
->opcode
, 5, 3);
22131 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22132 extract32(ctx
->opcode
, 0, 3);
22133 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22134 (extract32(ctx
->opcode
, 8, 1) << 2);
22135 rt
= decode_gpr_gpr4_zero(rt
);
22136 rs
= decode_gpr_gpr4(rs
);
22137 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22140 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22141 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
22144 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22145 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22146 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
22149 rt
= decode_gpr_gpr3_src_store(
22150 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22151 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22152 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22153 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22156 rt
= decode_gpr_gpr3_src_store(
22157 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22158 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22159 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22162 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22163 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22164 (extract32(ctx
->opcode
, 1, 9) << 1));
22167 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22168 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22169 (extract32(ctx
->opcode
, 1, 9) << 1));
22172 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22173 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22174 (extract32(ctx
->opcode
, 1, 6) << 1));
22177 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22178 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22179 (extract32(ctx
->opcode
, 1, 6) << 1));
22182 switch (ctx
->opcode
& 0xf) {
22185 switch (extract32(ctx
->opcode
, 4, 1)) {
22187 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22188 extract32(ctx
->opcode
, 5, 5), 0, 0);
22191 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22192 extract32(ctx
->opcode
, 5, 5), 31, 0);
22199 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22200 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22201 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22202 extract32(ctx
->opcode
, 0, 4) << 1);
22209 int count
= extract32(ctx
->opcode
, 0, 4);
22210 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22212 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22213 switch (extract32(ctx
->opcode
, 8, 1)) {
22215 gen_save(ctx
, rt
, count
, 0, u
);
22217 case NM_RESTORE_JRC16
:
22218 gen_restore(ctx
, rt
, count
, 0, u
);
22219 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22228 static const int gpr2reg1
[] = {4, 5, 6, 7};
22229 static const int gpr2reg2
[] = {5, 6, 7, 8};
22231 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22232 extract32(ctx
->opcode
, 8, 1);
22233 int r1
= gpr2reg1
[rd2
];
22234 int r2
= gpr2reg2
[rd2
];
22235 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22236 extract32(ctx
->opcode
, 0, 3);
22237 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22238 extract32(ctx
->opcode
, 5, 3);
22239 TCGv t0
= tcg_temp_new();
22240 TCGv t1
= tcg_temp_new();
22241 if (op
== NM_MOVEP
) {
22244 rs
= decode_gpr_gpr4_zero(r3
);
22245 rt
= decode_gpr_gpr4_zero(r4
);
22247 rd
= decode_gpr_gpr4(r3
);
22248 re
= decode_gpr_gpr4(r4
);
22252 gen_load_gpr(t0
, rs
);
22253 gen_load_gpr(t1
, rt
);
22254 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22255 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22261 return decode_nanomips_32_48_opc(env
, ctx
);
22268 /* SmartMIPS extension to MIPS32 */
22270 #if defined(TARGET_MIPS64)
22272 /* MDMX extension to MIPS64 */
22276 /* MIPSDSP functions. */
22277 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
22278 int rd
, int base
, int offset
)
22283 t0
= tcg_temp_new();
22286 gen_load_gpr(t0
, offset
);
22287 } else if (offset
== 0) {
22288 gen_load_gpr(t0
, base
);
22290 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
22295 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
22296 gen_store_gpr(t0
, rd
);
22299 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
22300 gen_store_gpr(t0
, rd
);
22303 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
22304 gen_store_gpr(t0
, rd
);
22306 #if defined(TARGET_MIPS64)
22308 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
22309 gen_store_gpr(t0
, rd
);
22316 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22317 int ret
, int v1
, int v2
)
22323 /* Treat as NOP. */
22327 v1_t
= tcg_temp_new();
22328 v2_t
= tcg_temp_new();
22330 gen_load_gpr(v1_t
, v1
);
22331 gen_load_gpr(v2_t
, v2
);
22334 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22335 case OPC_MULT_G_2E
:
22339 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22341 case OPC_ADDUH_R_QB
:
22342 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22345 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22347 case OPC_ADDQH_R_PH
:
22348 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22351 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22353 case OPC_ADDQH_R_W
:
22354 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22357 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22359 case OPC_SUBUH_R_QB
:
22360 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22363 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22365 case OPC_SUBQH_R_PH
:
22366 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22369 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22371 case OPC_SUBQH_R_W
:
22372 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22376 case OPC_ABSQ_S_PH_DSP
:
22378 case OPC_ABSQ_S_QB
:
22380 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22382 case OPC_ABSQ_S_PH
:
22384 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22388 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22390 case OPC_PRECEQ_W_PHL
:
22392 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22393 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22395 case OPC_PRECEQ_W_PHR
:
22397 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22398 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22399 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22401 case OPC_PRECEQU_PH_QBL
:
22403 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22405 case OPC_PRECEQU_PH_QBR
:
22407 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22409 case OPC_PRECEQU_PH_QBLA
:
22411 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22413 case OPC_PRECEQU_PH_QBRA
:
22415 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22417 case OPC_PRECEU_PH_QBL
:
22419 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22421 case OPC_PRECEU_PH_QBR
:
22423 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22425 case OPC_PRECEU_PH_QBLA
:
22427 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22429 case OPC_PRECEU_PH_QBRA
:
22431 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22435 case OPC_ADDU_QB_DSP
:
22439 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22441 case OPC_ADDQ_S_PH
:
22443 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22447 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22451 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22453 case OPC_ADDU_S_QB
:
22455 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22459 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22461 case OPC_ADDU_S_PH
:
22463 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22467 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22469 case OPC_SUBQ_S_PH
:
22471 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22475 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22479 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22481 case OPC_SUBU_S_QB
:
22483 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22487 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22489 case OPC_SUBU_S_PH
:
22491 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22495 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22499 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22503 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22505 case OPC_RADDU_W_QB
:
22507 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22511 case OPC_CMPU_EQ_QB_DSP
:
22513 case OPC_PRECR_QB_PH
:
22515 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22517 case OPC_PRECRQ_QB_PH
:
22519 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22521 case OPC_PRECR_SRA_PH_W
:
22524 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22525 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22527 tcg_temp_free_i32(sa_t
);
22530 case OPC_PRECR_SRA_R_PH_W
:
22533 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22534 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22536 tcg_temp_free_i32(sa_t
);
22539 case OPC_PRECRQ_PH_W
:
22541 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22543 case OPC_PRECRQ_RS_PH_W
:
22545 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22547 case OPC_PRECRQU_S_QB_PH
:
22549 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22553 #ifdef TARGET_MIPS64
22554 case OPC_ABSQ_S_QH_DSP
:
22556 case OPC_PRECEQ_L_PWL
:
22558 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22560 case OPC_PRECEQ_L_PWR
:
22562 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22564 case OPC_PRECEQ_PW_QHL
:
22566 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22568 case OPC_PRECEQ_PW_QHR
:
22570 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22572 case OPC_PRECEQ_PW_QHLA
:
22574 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22576 case OPC_PRECEQ_PW_QHRA
:
22578 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22580 case OPC_PRECEQU_QH_OBL
:
22582 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22584 case OPC_PRECEQU_QH_OBR
:
22586 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22588 case OPC_PRECEQU_QH_OBLA
:
22590 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22592 case OPC_PRECEQU_QH_OBRA
:
22594 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22596 case OPC_PRECEU_QH_OBL
:
22598 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22600 case OPC_PRECEU_QH_OBR
:
22602 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22604 case OPC_PRECEU_QH_OBLA
:
22606 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22608 case OPC_PRECEU_QH_OBRA
:
22610 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22612 case OPC_ABSQ_S_OB
:
22614 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22616 case OPC_ABSQ_S_PW
:
22618 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22620 case OPC_ABSQ_S_QH
:
22622 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22626 case OPC_ADDU_OB_DSP
:
22628 case OPC_RADDU_L_OB
:
22630 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22634 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22636 case OPC_SUBQ_S_PW
:
22638 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22642 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22644 case OPC_SUBQ_S_QH
:
22646 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22650 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22652 case OPC_SUBU_S_OB
:
22654 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22658 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22660 case OPC_SUBU_S_QH
:
22662 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22666 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22668 case OPC_SUBUH_R_OB
:
22670 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22674 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22676 case OPC_ADDQ_S_PW
:
22678 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22682 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22684 case OPC_ADDQ_S_QH
:
22686 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22690 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22692 case OPC_ADDU_S_OB
:
22694 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22698 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22700 case OPC_ADDU_S_QH
:
22702 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22706 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22708 case OPC_ADDUH_R_OB
:
22710 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22714 case OPC_CMPU_EQ_OB_DSP
:
22716 case OPC_PRECR_OB_QH
:
22718 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22720 case OPC_PRECR_SRA_QH_PW
:
22723 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22724 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22725 tcg_temp_free_i32(ret_t
);
22728 case OPC_PRECR_SRA_R_QH_PW
:
22731 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22732 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22733 tcg_temp_free_i32(sa_v
);
22736 case OPC_PRECRQ_OB_QH
:
22738 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22740 case OPC_PRECRQ_PW_L
:
22742 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22744 case OPC_PRECRQ_QH_PW
:
22746 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22748 case OPC_PRECRQ_RS_QH_PW
:
22750 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22752 case OPC_PRECRQU_S_OB_QH
:
22754 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22761 tcg_temp_free(v1_t
);
22762 tcg_temp_free(v2_t
);
/*
 * gen_mipsdsp_shift(): emit TCG for the MIPS DSP ASE shift family
 * (SHLL/SHLLV, SHRL/SHRLV, SHRA/SHRAV and their saturating/rounding
 * "_S"/"_R" forms) over QB/PH/W element types, plus the MIPS64 DSP
 * OB/QH/PW variants under TARGET_MIPS64.
 *
 * Convention visible below: immediate-shift opcodes pass t0 (loaded
 * with the constant v1 via tcg_gen_movi_tl) as the shift amount, while
 * the register-variable "V" opcodes pass v1_t (the GPR contents loaded
 * with gen_load_gpr).  Saturating helpers additionally take cpu_env so
 * they can set DSPControl's ouflag.
 *
 * NOTE(review): this chunk is a lossy extraction — interior lines
 * (opening braces, `break`s, several `case` labels, the outer switch
 * header) are missing and stale line numbers are fused into the text.
 * Code text is preserved verbatim; only comments were added.  Restore
 * from the upstream file before compiling.
 */
22765 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22766 int ret
, int v1
, int v2
)
/* ret == 0 writes the hardwired-zero register, so it is skipped. */
22774 /* Treat as NOP. */
22778 t0
= tcg_temp_new();
22779 v1_t
= tcg_temp_new();
22780 v2_t
= tcg_temp_new();
/* t0 carries the immediate shift amount; v1_t/v2_t carry GPR values. */
22782 tcg_gen_movi_tl(t0
, v1
);
22783 gen_load_gpr(v1_t
, v1
);
22784 gen_load_gpr(v2_t
, v2
);
/* 32-bit DSP shifts: sub-opcode decoded from the SHLL.QB minor field. */
22787 case OPC_SHLL_QB_DSP
:
22789 op2
= MASK_SHLL_QB(ctx
->opcode
);
22793 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22797 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22801 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22805 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22807 case OPC_SHLL_S_PH
:
22809 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22811 case OPC_SHLLV_S_PH
:
22813 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22817 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22819 case OPC_SHLLV_S_W
:
22821 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* Logical right shifts never saturate, so no cpu_env argument. */
22825 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22829 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22833 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22837 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22841 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22843 case OPC_SHRA_R_QB
:
22845 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22849 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22851 case OPC_SHRAV_R_QB
:
22853 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22857 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22859 case OPC_SHRA_R_PH
:
22861 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22865 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22867 case OPC_SHRAV_R_PH
:
22869 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22873 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22875 case OPC_SHRAV_R_W
:
22877 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22879 default: /* Invalid */
22880 MIPS_INVAL("MASK SHLL.QB");
22881 generate_exception_end(ctx
, EXCP_RI
);
/* MIPS64 DSP shifts; note the argument order flips to (dest, v2_t, shift). */
22886 #ifdef TARGET_MIPS64
22887 case OPC_SHLL_OB_DSP
:
22888 op2
= MASK_SHLL_OB(ctx
->opcode
);
22892 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22896 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22898 case OPC_SHLL_S_PW
:
22900 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22902 case OPC_SHLLV_S_PW
:
22904 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22908 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22912 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22916 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22920 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22922 case OPC_SHLL_S_QH
:
22924 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22926 case OPC_SHLLV_S_QH
:
22928 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22932 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
22936 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22938 case OPC_SHRA_R_OB
:
22940 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
22942 case OPC_SHRAV_R_OB
:
22944 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22948 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
22952 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22954 case OPC_SHRA_R_PW
:
22956 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
22958 case OPC_SHRAV_R_PW
:
22960 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
22964 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
22968 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22970 case OPC_SHRA_R_QH
:
22972 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
22974 case OPC_SHRAV_R_QH
:
22976 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22980 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
22984 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
22988 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
22992 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
22994 default: /* Invalid */
22995 MIPS_INVAL("MASK SHLL.OB");
22996 generate_exception_end(ctx
, EXCP_RI
);
/* Release the TCG temporaries (t0's free is in an elided line). */
23004 tcg_temp_free(v1_t
);
23005 tcg_temp_free(v2_t
);
/*
 * gen_mipsdsp_multiply(): emit TCG for the DSP ASE multiply family —
 * plain MUL*/MULQ* forms writing a GPR, and the dot-product /
 * multiply-accumulate forms (DPA*, DPS*, MAQ*, MULSA*) that update a
 * HI/LO accumulator instead.  For the accumulator forms the i32 temp
 * t0 is loaded with the accumulator index (here `ret`, masked to two
 * bits for the 64-bit group) and passed to the helper.
 *
 * check_ret == 1 means `ret` is a GPR destination, so ret == 0
 * (the hardwired zero register) turns the insn into a NOP.
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
23008 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23009 int ret
, int v1
, int v2
, int check_ret
)
23015 if ((ret
== 0) && (check_ret
== 1)) {
23016 /* Treat as NOP. */
23020 t0
= tcg_temp_new_i32();
23021 v1_t
= tcg_temp_new();
23022 v2_t
= tcg_temp_new();
/* t0 = accumulator index for the DPA/DPS/MAQ helpers below. */
23024 tcg_gen_movi_i32(t0
, ret
);
23025 gen_load_gpr(v1_t
, v1
);
23026 gen_load_gpr(v2_t
, v2
);
23029 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
23030 * the same mask and op1. */
23031 case OPC_MULT_G_2E
:
23035 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23038 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23041 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23043 case OPC_MULQ_RS_W
:
23044 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* Accumulator group: helpers take (ac-index, rs, rt, env). */
23048 case OPC_DPA_W_PH_DSP
:
23050 case OPC_DPAU_H_QBL
:
23052 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23054 case OPC_DPAU_H_QBR
:
23056 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23058 case OPC_DPSU_H_QBL
:
23060 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23062 case OPC_DPSU_H_QBR
:
23064 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23068 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23070 case OPC_DPAX_W_PH
:
23072 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23074 case OPC_DPAQ_S_W_PH
:
23076 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23078 case OPC_DPAQX_S_W_PH
:
23080 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23082 case OPC_DPAQX_SA_W_PH
:
23084 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23088 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23090 case OPC_DPSX_W_PH
:
23092 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23094 case OPC_DPSQ_S_W_PH
:
23096 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23098 case OPC_DPSQX_S_W_PH
:
23100 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23102 case OPC_DPSQX_SA_W_PH
:
23104 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23106 case OPC_MULSAQ_S_W_PH
:
23108 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23110 case OPC_DPAQ_SA_L_W
:
23112 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23114 case OPC_DPSQ_SA_L_W
:
23116 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23118 case OPC_MAQ_S_W_PHL
:
23120 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23122 case OPC_MAQ_S_W_PHR
:
23124 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23126 case OPC_MAQ_SA_W_PHL
:
23128 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23130 case OPC_MAQ_SA_W_PHR
:
23132 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23134 case OPC_MULSA_W_PH
:
23136 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
/* MIPS64 accumulator group: ac index masked to 2 bits; note the
 * helper argument order changes to (rs, rt, ac-index, env). */
23140 #ifdef TARGET_MIPS64
23141 case OPC_DPAQ_W_QH_DSP
:
23143 int ac
= ret
& 0x03;
23144 tcg_gen_movi_i32(t0
, ac
);
23149 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
23153 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
23157 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23161 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23165 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23167 case OPC_DPAQ_S_W_QH
:
23169 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23171 case OPC_DPAQ_SA_L_PW
:
23173 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23175 case OPC_DPAU_H_OBL
:
23177 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23179 case OPC_DPAU_H_OBR
:
23181 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23185 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23187 case OPC_DPSQ_S_W_QH
:
23189 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23191 case OPC_DPSQ_SA_L_PW
:
23193 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23195 case OPC_DPSU_H_OBL
:
23197 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23199 case OPC_DPSU_H_OBR
:
23201 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23203 case OPC_MAQ_S_L_PWL
:
23205 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23207 case OPC_MAQ_S_L_PWR
:
23209 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23211 case OPC_MAQ_S_W_QHLL
:
23213 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23215 case OPC_MAQ_SA_W_QHLL
:
23217 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23219 case OPC_MAQ_S_W_QHLR
:
23221 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23223 case OPC_MAQ_SA_W_QHLR
:
23225 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23227 case OPC_MAQ_S_W_QHRL
:
23229 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23231 case OPC_MAQ_SA_W_QHRL
:
23233 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23235 case OPC_MAQ_S_W_QHRR
:
23237 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23239 case OPC_MAQ_SA_W_QHRR
:
23241 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23243 case OPC_MULSAQ_S_L_PW
:
23245 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23247 case OPC_MULSAQ_S_W_QH
:
23249 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
/* MULEU/MULEQ/MULQ forms share the ADDU.QB / ADDU.OB major opcode. */
23255 case OPC_ADDU_QB_DSP
:
23257 case OPC_MULEU_S_PH_QBL
:
23259 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23261 case OPC_MULEU_S_PH_QBR
:
23263 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23265 case OPC_MULQ_RS_PH
:
23267 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23269 case OPC_MULEQ_S_W_PHL
:
23271 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23273 case OPC_MULEQ_S_W_PHR
:
23275 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23277 case OPC_MULQ_S_PH
:
23279 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23283 #ifdef TARGET_MIPS64
23284 case OPC_ADDU_OB_DSP
:
23286 case OPC_MULEQ_S_PW_QHL
:
23288 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23290 case OPC_MULEQ_S_PW_QHR
:
23292 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23294 case OPC_MULEU_S_QH_OBL
:
23296 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23298 case OPC_MULEU_S_QH_OBR
:
23300 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23302 case OPC_MULQ_RS_QH
:
23304 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* Free the temporaries. */
23311 tcg_temp_free_i32(t0
);
23312 tcg_temp_free(v1_t
);
23313 tcg_temp_free(v2_t
);
/*
 * gen_mipsdsp_bitinsn(): emit TCG for DSP bit-manipulation /
 * replicate instructions — BITREV, REPL/REPLV over QB/PH (and the
 * MIPS64 OB/QH/PW replications).  Immediate REPL forms fold the
 * replicated constant at translation time with tcg_gen_movi_tl;
 * REPLV forms build the replication from the GPR value in val_t via
 * shift/or sequences.
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
23316 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
/* Destination == $zero is a NOP. */
23324 /* Treat as NOP. */
23328 t0
= tcg_temp_new();
23329 val_t
= tcg_temp_new();
23330 gen_load_gpr(val_t
, val
);
23333 case OPC_ABSQ_S_PH_DSP
:
23337 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
/* REPL.QB: replicate an 8-bit immediate into all four byte lanes,
 * then sign-extend the 32-bit result for the 64-bit GPR file. */
23342 target_long result
;
23343 imm
= (ctx
->opcode
>> 16) & 0xFF;
23344 result
= (uint32_t)imm
<< 24 |
23345 (uint32_t)imm
<< 16 |
23346 (uint32_t)imm
<< 8 |
23348 result
= (int32_t)result
;
23349 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
/* REPLV.QB: replicate val_t's low byte across the word at runtime. */
23354 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23355 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23356 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23357 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23358 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23359 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
/* REPL.PH: 10-bit immediate sign-extended from bit 9, then doubled
 * into both halfword lanes. */
23364 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23365 imm
= (int16_t)(imm
<< 6) >> 6;
23366 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23367 (target_long
)((int32_t)imm
<< 16 | \
/* REPLV.PH: replicate val_t's low halfword at runtime. */
23373 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23374 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23375 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23376 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
/* MIPS64 DSP replications over the full 64-bit register. */
23380 #ifdef TARGET_MIPS64
23381 case OPC_ABSQ_S_QH_DSP
:
/* REPL.OB: 8-bit immediate replicated into all eight byte lanes. */
23388 imm
= (ctx
->opcode
>> 16) & 0xFF;
23389 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23390 temp
= (temp
<< 16) | temp
;
23391 temp
= (temp
<< 32) | temp
;
23392 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
/* REPL.PW: sign-extended 10-bit immediate into both 32-bit lanes. */
23400 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23401 imm
= (int16_t)(imm
<< 6) >> 6;
23402 temp
= ((target_long
)imm
<< 32) \
23403 | ((target_long
)imm
& 0xFFFFFFFF);
23404 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
/* REPL.QH: sign-extended 10-bit immediate into all four halfwords. */
23412 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23413 imm
= (int16_t)(imm
<< 6) >> 6;
23415 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23416 ((uint64_t)(uint16_t)imm
<< 32) |
23417 ((uint64_t)(uint16_t)imm
<< 16) |
23418 (uint64_t)(uint16_t)imm
;
23419 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
/* REPLV.OB: runtime byte replication across 64 bits (8→16→32→64). */
23424 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23425 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23426 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23427 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23428 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23429 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23430 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
/* REPLV.PW: duplicate the low 32 bits into the high half. */
23434 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23435 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23436 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
/* REPLV.QH: runtime halfword replication (16→32→64). */
23440 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23441 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23442 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23443 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23444 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
/* t0's free is in an elided line; val_t released here. */
23451 tcg_temp_free(val_t
);
/*
 * gen_mipsdsp_add_cmp_pick(): emit TCG for DSP compare / pick / pack
 * instructions.  CMPU/CMP forms write condition bits into DSPControl
 * (helpers take cpu_env, no GPR destination); CMPGU forms write a
 * bitmask into a GPR; CMPGDU forms do both — the visible code mirrors
 * the GPR result into DSPControl bits 27:24 by masking with 0xF0FFFFFF
 * and OR-ing the mask shifted left by 24.  PICK selects lanes based on
 * DSPControl; PACKRL packs register halves.
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
23454 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23455 uint32_t op1
, uint32_t op2
,
23456 int ret
, int v1
, int v2
, int check_ret
)
23462 if ((ret
== 0) && (check_ret
== 1)) {
23463 /* Treat as NOP. */
23467 t1
= tcg_temp_new();
23468 v1_t
= tcg_temp_new();
23469 v2_t
= tcg_temp_new();
23471 gen_load_gpr(v1_t
, v1
);
23472 gen_load_gpr(v2_t
, v2
);
23475 case OPC_CMPU_EQ_QB_DSP
:
23477 case OPC_CMPU_EQ_QB
:
23479 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23481 case OPC_CMPU_LT_QB
:
23483 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23485 case OPC_CMPU_LE_QB
:
23487 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23489 case OPC_CMPGU_EQ_QB
:
23491 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23493 case OPC_CMPGU_LT_QB
:
23495 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23497 case OPC_CMPGU_LE_QB
:
23499 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
/* CMPGDU: GPR result plus DSPControl[27:24] update, inlined here by
 * reusing the CMPGU helpers and patching DSPControl manually. */
23501 case OPC_CMPGDU_EQ_QB
:
23503 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23504 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23505 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23506 tcg_gen_shli_tl(t1
, t1
, 24);
23507 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23509 case OPC_CMPGDU_LT_QB
:
23511 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23512 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23513 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23514 tcg_gen_shli_tl(t1
, t1
, 24);
23515 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23517 case OPC_CMPGDU_LE_QB
:
23519 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23520 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23521 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23522 tcg_gen_shli_tl(t1
, t1
, 24);
23523 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23525 case OPC_CMP_EQ_PH
:
23527 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23529 case OPC_CMP_LT_PH
:
23531 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23533 case OPC_CMP_LE_PH
:
23535 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23539 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23543 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23545 case OPC_PACKRL_PH
:
23547 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
/* MIPS64 DSP compare/pick variants. */
23551 #ifdef TARGET_MIPS64
23552 case OPC_CMPU_EQ_OB_DSP
:
23554 case OPC_CMP_EQ_PW
:
23556 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23558 case OPC_CMP_LT_PW
:
23560 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23562 case OPC_CMP_LE_PW
:
23564 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23566 case OPC_CMP_EQ_QH
:
23568 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23570 case OPC_CMP_LT_QH
:
23572 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23574 case OPC_CMP_LE_QH
:
23576 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23578 case OPC_CMPGDU_EQ_OB
:
23580 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23582 case OPC_CMPGDU_LT_OB
:
23584 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23586 case OPC_CMPGDU_LE_OB
:
23588 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23590 case OPC_CMPGU_EQ_OB
:
23592 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23594 case OPC_CMPGU_LT_OB
:
23596 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23598 case OPC_CMPGU_LE_OB
:
23600 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23602 case OPC_CMPU_EQ_OB
:
23604 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23606 case OPC_CMPU_LT_OB
:
23608 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23610 case OPC_CMPU_LE_OB
:
23612 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23614 case OPC_PACKRL_PW
:
23616 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23620 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23624 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23628 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
/* t1's free is in an elided line; release the value temps. */
23636 tcg_temp_free(v1_t
);
23637 tcg_temp_free(v2_t
);
/*
 * gen_mipsdsp_append(): emit TCG for DSP APPEND / PREPEND / BALIGN
 * (and the MIPS64 DAPPEND group), which shift bits/bytes from rs into
 * rt.  All variants read rs into t0 and update rt in place; sa is the
 * bit (or byte, for BALIGN) count taken from the instruction.
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
23640 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23641 uint32_t op1
, int rt
, int rs
, int sa
)
/* rt == $zero would discard the result. */
23648 /* Treat as NOP. */
23652 t0
= tcg_temp_new();
23653 gen_load_gpr(t0
, rs
);
23656 case OPC_APPEND_DSP
:
23657 switch (MASK_APPEND(ctx
->opcode
)) {
/* APPEND: shift rt left by sa and insert rs's low sa bits. */
23660 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23662 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
/* PREPEND: shift rt right by sa and insert rs's low sa bits at the top. */
23666 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23667 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23668 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23669 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23671 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
/* BALIGN: byte-wise align; sa == 0 / 2 degenerate cases handled
 * elsewhere (elided lines). */
23675 if (sa
!= 0 && sa
!= 2) {
23676 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23677 tcg_gen_ext32u_tl(t0
, t0
);
23678 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23679 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23681 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23683 default: /* Invalid */
23684 MIPS_INVAL("MASK APPEND");
23685 generate_exception_end(ctx
, EXCP_RI
);
23689 #ifdef TARGET_MIPS64
23690 case OPC_DAPPEND_DSP
:
23691 switch (MASK_DAPPEND(ctx
->opcode
)) {
23694 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23698 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23699 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
/* NOTE(review): suspicious — this OR combines t0 with itself, so the
 * shifted-down rt from the previous line is discarded, unlike the
 * parallel case below which ORs cpu_gpr[rt] with t0.  Looks like a
 * copy-paste bug; confirm against the MIPS DSP spec / upstream QEMU
 * before changing. */
23700 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23704 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23705 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23706 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
/* DBALIGN: 64-bit byte alignment; degenerate sa values elided. */
23711 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23712 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23713 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23714 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23717 default: /* Invalid */
23718 MIPS_INVAL("MASK DAPPEND");
23719 generate_exception_end(ctx
, EXCP_RI
);
/*
 * gen_mipsdsp_accinsn(): emit TCG for DSP accumulator-access
 * instructions — EXTR/EXTP/EXTPDP (and their rounding / saturating /
 * variable "V" forms), SHILO, MTHLIP, WRDSP/RDDSP, plus the MIPS64
 * DEXTR/DSHILO/DMTHLIP group.  Immediate forms load the constant
 * fields into t0/t1 with tcg_gen_movi_tl; "V" forms pass the GPR
 * value v1_t as the shift/size operand.  All helpers take cpu_env
 * because they touch the HI/LO accumulators and DSPControl.
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
23728 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23729 int ret
, int v1
, int v2
, int check_ret
)
23738 if ((ret
== 0) && (check_ret
== 1)) {
23739 /* Treat as NOP. */
23743 t0
= tcg_temp_new();
23744 t1
= tcg_temp_new();
23745 v1_t
= tcg_temp_new();
23746 v2_t
= tcg_temp_new();
23748 gen_load_gpr(v1_t
, v1
);
23749 gen_load_gpr(v2_t
, v2
);
23752 case OPC_EXTR_W_DSP
:
/* Immediate forms: t0 = accumulator index (v2), t1 = shift (v1). */
23756 tcg_gen_movi_tl(t0
, v2
);
23757 tcg_gen_movi_tl(t1
, v1
);
23758 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23761 tcg_gen_movi_tl(t0
, v2
);
23762 tcg_gen_movi_tl(t1
, v1
);
23763 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23765 case OPC_EXTR_RS_W
:
23766 tcg_gen_movi_tl(t0
, v2
);
23767 tcg_gen_movi_tl(t1
, v1
);
23768 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23771 tcg_gen_movi_tl(t0
, v2
);
23772 tcg_gen_movi_tl(t1
, v1
);
23773 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
/* Variable forms: the shift comes from the rs register (v1_t). */
23775 case OPC_EXTRV_S_H
:
23776 tcg_gen_movi_tl(t0
, v2
);
23777 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23780 tcg_gen_movi_tl(t0
, v2
);
23781 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23783 case OPC_EXTRV_R_W
:
23784 tcg_gen_movi_tl(t0
, v2
);
23785 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23787 case OPC_EXTRV_RS_W
:
23788 tcg_gen_movi_tl(t0
, v2
);
23789 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23792 tcg_gen_movi_tl(t0
, v2
);
23793 tcg_gen_movi_tl(t1
, v1
);
23794 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23797 tcg_gen_movi_tl(t0
, v2
);
23798 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23801 tcg_gen_movi_tl(t0
, v2
);
23802 tcg_gen_movi_tl(t1
, v1
);
23803 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23806 tcg_gen_movi_tl(t0
, v2
);
23807 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
/* SHILO: 6-bit signed shift immediate from opcode bits 25:20;
 * here `ret` carries the accumulator index. */
23810 imm
= (ctx
->opcode
>> 20) & 0x3F;
23811 tcg_gen_movi_tl(t0
, ret
);
23812 tcg_gen_movi_tl(t1
, imm
);
23813 gen_helper_shilo(t0
, t1
, cpu_env
);
23816 tcg_gen_movi_tl(t0
, ret
);
23817 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23820 tcg_gen_movi_tl(t0
, ret
);
23821 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
/* WRDSP: 10-bit mask field selects which DSPControl fields to write. */
23824 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23825 tcg_gen_movi_tl(t0
, imm
);
23826 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
/* RDDSP: 10-bit mask selects which DSPControl fields to read. */
23829 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23830 tcg_gen_movi_tl(t0
, imm
);
23831 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23835 #ifdef TARGET_MIPS64
23836 case OPC_DEXTR_W_DSP
:
23840 tcg_gen_movi_tl(t0
, ret
);
23841 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
/* DSHILO immediate: 7-bit shift and 2-bit accumulator from opcode. */
23845 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23846 int ac
= (ctx
->opcode
>> 11) & 0x03;
23847 tcg_gen_movi_tl(t0
, shift
);
23848 tcg_gen_movi_tl(t1
, ac
);
23849 gen_helper_dshilo(t0
, t1
, cpu_env
);
/* DSHILOV: shift from register, accumulator index from opcode. */
23854 int ac
= (ctx
->opcode
>> 11) & 0x03;
23855 tcg_gen_movi_tl(t0
, ac
);
23856 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23860 tcg_gen_movi_tl(t0
, v2
);
23861 tcg_gen_movi_tl(t1
, v1
);
23863 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23866 tcg_gen_movi_tl(t0
, v2
);
23867 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23870 tcg_gen_movi_tl(t0
, v2
);
23871 tcg_gen_movi_tl(t1
, v1
);
23872 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23875 tcg_gen_movi_tl(t0
, v2
);
23876 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23879 tcg_gen_movi_tl(t0
, v2
);
23880 tcg_gen_movi_tl(t1
, v1
);
23881 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23883 case OPC_DEXTR_R_L
:
23884 tcg_gen_movi_tl(t0
, v2
);
23885 tcg_gen_movi_tl(t1
, v1
);
23886 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23888 case OPC_DEXTR_RS_L
:
23889 tcg_gen_movi_tl(t0
, v2
);
23890 tcg_gen_movi_tl(t1
, v1
);
23891 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23894 tcg_gen_movi_tl(t0
, v2
);
23895 tcg_gen_movi_tl(t1
, v1
);
23896 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23898 case OPC_DEXTR_R_W
:
23899 tcg_gen_movi_tl(t0
, v2
);
23900 tcg_gen_movi_tl(t1
, v1
);
23901 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23903 case OPC_DEXTR_RS_W
:
23904 tcg_gen_movi_tl(t0
, v2
);
23905 tcg_gen_movi_tl(t1
, v1
);
23906 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23908 case OPC_DEXTR_S_H
:
23909 tcg_gen_movi_tl(t0
, v2
);
23910 tcg_gen_movi_tl(t1
, v1
);
23911 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
/* NOTE(review): suspicious — this is the *variable* form (DEXTRV_S_H)
 * but it loads the register *number* v1 as an immediate into t1
 * instead of passing the register *value* v1_t, unlike every other
 * EXTRV/DEXTRV case here (cf. OPC_EXTRV_S_H above, which passes v1_t).
 * Looks like a copy-paste of the immediate form; confirm against the
 * MIPS DSP spec / upstream QEMU before changing. */
23913 case OPC_DEXTRV_S_H
:
23914 tcg_gen_movi_tl(t0
, v2
);
23915 tcg_gen_movi_tl(t1
, v1
);
23916 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23919 tcg_gen_movi_tl(t0
, v2
);
23920 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23922 case OPC_DEXTRV_R_L
:
23923 tcg_gen_movi_tl(t0
, v2
);
23924 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23926 case OPC_DEXTRV_RS_L
:
23927 tcg_gen_movi_tl(t0
, v2
);
23928 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23931 tcg_gen_movi_tl(t0
, v2
);
23932 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23934 case OPC_DEXTRV_R_W
:
23935 tcg_gen_movi_tl(t0
, v2
);
23936 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23938 case OPC_DEXTRV_RS_W
:
23939 tcg_gen_movi_tl(t0
, v2
);
23940 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
/* t0/t1 frees are in elided lines; release the value temps. */
23949 tcg_temp_free(v1_t
);
23950 tcg_temp_free(v2_t
);
23953 /* End MIPSDSP functions. */
/*
 * decode_opc_special_r6(): decode Release-6 SPECIAL-opcode
 * instructions — LSA/DLSA, the R6 fused MUL/DIV family, conditional
 * select moves, CLZ/CLO (distinguished from pre-R6 MFHI/MTHI by
 * rt == 0 && sa == 1), and SDBBP (with UHI semihosting support).
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
23955 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
23957 int rs
, rt
, rd
, sa
;
/* Standard R-type field extraction. */
23960 rs
= (ctx
->opcode
>> 21) & 0x1f;
23961 rt
= (ctx
->opcode
>> 16) & 0x1f;
23962 rd
= (ctx
->opcode
>> 11) & 0x1f;
23963 sa
= (ctx
->opcode
>> 6) & 0x1f;
23965 op1
= MASK_SPECIAL(ctx
->opcode
);
/* LSA: the 2-bit shift amount lives in opcode bits 7:6. */
23968 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
23974 op2
= MASK_R6_MULDIV(ctx
->opcode
);
23984 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
23987 MIPS_INVAL("special_r6 muldiv");
23988 generate_exception_end(ctx
, EXCP_RI
);
23994 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
23998 if (rt
== 0 && sa
== 1) {
23999 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
24000 We need additionally to check other fields */
24001 gen_cl(ctx
, op1
, rd
, rs
);
24003 generate_exception_end(ctx
, EXCP_RI
);
/* SDBBP: UHI semihosting hook, else SBRI-gated debug breakpoint. */
24007 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
24008 gen_helper_do_semihosting(cpu_env
);
24010 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
24011 generate_exception_end(ctx
, EXCP_RI
);
24013 generate_exception_end(ctx
, EXCP_DBp
);
/* 64-bit-only R6 SPECIAL instructions. */
24017 #if defined(TARGET_MIPS64)
24019 check_mips_64(ctx
);
24020 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
24024 if (rt
== 0 && sa
== 1) {
24025 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
24026 We need additionally to check other fields */
24027 check_mips_64(ctx
);
24028 gen_cl(ctx
, op1
, rd
, rs
);
24030 generate_exception_end(ctx
, EXCP_RI
);
24038 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24048 check_mips_64(ctx
);
24049 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24052 MIPS_INVAL("special_r6 muldiv");
24053 generate_exception_end(ctx
, EXCP_RI
);
24058 default: /* Invalid */
24059 MIPS_INVAL("special_r6");
24060 generate_exception_end(ctx
, EXCP_RI
);
/*
 * decode_opc_special_tx79(): decode SPECIAL-opcode instructions for
 * the Toshiba TX79 / Sony R5900 (PlayStation 2 "Emotion Engine") —
 * conditional moves, HI/LO moves (always accumulator 0 on this core),
 * the TX-specific three-operand multiply, and divides.
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
24065 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
24067 int rs
= extract32(ctx
->opcode
, 21, 5);
24068 int rt
= extract32(ctx
->opcode
, 16, 5);
24069 int rd
= extract32(ctx
->opcode
, 11, 5);
24070 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
24073 case OPC_MOVN
: /* Conditional move */
24075 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24077 case OPC_MFHI
: /* Move from HI/LO */
24079 gen_HILO(ctx
, op1
, 0, rd
);
24082 case OPC_MTLO
: /* Move to HI/LO */
24083 gen_HILO(ctx
, op1
, 0, rs
);
/* TX79 3-operand multiply (writes rd as well as HI/LO). */
24087 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
24091 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24093 #if defined(TARGET_MIPS64)
/* R5900 DMULT/DDIV group is user-mode-only on this core. */
24098 check_insn_opc_user_only(ctx
, INSN_R5900
);
24099 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24103 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
24105 default: /* Invalid */
24106 MIPS_INVAL("special_tx79");
24107 generate_exception_end(ctx
, EXCP_RI
);
/*
 * decode_opc_special_legacy(): decode pre-Release-6 SPECIAL-opcode
 * instructions — MOVN/MOVZ conditional moves, HI/LO accumulator
 * moves (low 2 bits of rs/rd select a DSP accumulator), MOVCI
 * (FP-condition move, CP1-gated), VR54xx multiply variants, the
 * MULT/DIV family, and the legacy SPIM pseudo-op.
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
24112 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24114 int rs
, rt
, rd
, sa
;
24117 rs
= (ctx
->opcode
>> 21) & 0x1f;
24118 rt
= (ctx
->opcode
>> 16) & 0x1f;
24119 rd
= (ctx
->opcode
>> 11) & 0x1f;
24120 sa
= (ctx
->opcode
>> 6) & 0x1f;
24122 op1
= MASK_SPECIAL(ctx
->opcode
);
24124 case OPC_MOVN
: /* Conditional move */
24126 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
24127 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
24128 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
/* rs&3 / rd&3 select the DSP accumulator pair. */
24130 case OPC_MFHI
: /* Move from HI/LO */
24132 gen_HILO(ctx
, op1
, rs
& 3, rd
);
24135 case OPC_MTLO
: /* Move to HI/LO */
24136 gen_HILO(ctx
, op1
, rd
& 3, rs
);
/* MOVCI requires an enabled FPU, else a coprocessor-unusable fault. */
24139 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
24140 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
24141 check_cp1_enabled(ctx
);
24142 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
24143 (ctx
->opcode
>> 16) & 1);
24145 generate_exception_err(ctx
, EXCP_CpU
, 1);
/* VR54xx re-decodes the minor opcode for its multiply variants. */
24151 check_insn(ctx
, INSN_VR54XX
);
24152 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
24153 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
24155 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24160 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24162 #if defined(TARGET_MIPS64)
24167 check_insn(ctx
, ISA_MIPS3
);
24168 check_mips_64(ctx
);
24169 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24173 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
/* SPIM: always a reserved-instruction exception in QEMU. */
24176 #ifdef MIPS_STRICT_STANDARD
24177 MIPS_INVAL("SPIM");
24178 generate_exception_end(ctx
, EXCP_RI
);
24180 /* Implemented as RI exception for now. */
24181 MIPS_INVAL("spim (unofficial)");
24182 generate_exception_end(ctx
, EXCP_RI
);
24185 default: /* Invalid */
24186 MIPS_INVAL("special_legacy");
24187 generate_exception_end(ctx
, EXCP_RI
);
/*
 * decode_opc_special(): top-level decoder for the SPECIAL major
 * opcode.  Handles the ISA-independent encodings directly (shifts,
 * arithmetic, logic, traps, SYSCALL/BREAK/SYNC, the R2 ROTR family
 * recognised by extra opcode bits, and MIPS64 doubleword forms), and
 * defers ISA-specific function codes to decode_opc_special_r6(),
 * decode_opc_special_tx79() or decode_opc_special_legacy() depending
 * on ctx->insn_flags.
 *
 * NOTE(review): fragmented extraction — braces, `break`s and several
 * `case` labels are missing; stale line numbers are fused into the
 * text.  Code preserved verbatim, comments only added.
 */
24192 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
24194 int rs
, rt
, rd
, sa
;
24197 rs
= (ctx
->opcode
>> 21) & 0x1f;
24198 rt
= (ctx
->opcode
>> 16) & 0x1f;
24199 rd
= (ctx
->opcode
>> 11) & 0x1f;
24200 sa
= (ctx
->opcode
>> 6) & 0x1f;
24202 op1
= MASK_SPECIAL(ctx
->opcode
);
/* SLL with sa==5, rd/rs/rt==0 encodes PAUSE; R6 forbids it in a
 * branch delay slot. */
24204 case OPC_SLL
: /* Shift with immediate */
24205 if (sa
== 5 && rd
== 0 &&
24206 rs
== 0 && rt
== 0) { /* PAUSE */
24207 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
24208 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
24209 generate_exception_end(ctx
, EXCP_RI
);
24215 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
/* SRL/ROTR share a function code; bit 21 selects ROTR on R2+. */
24218 switch ((ctx
->opcode
>> 21) & 0x1f) {
24220 /* rotr is decoded as srl on non-R2 CPUs */
24221 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24226 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24229 generate_exception_end(ctx
, EXCP_RI
);
24237 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24239 case OPC_SLLV
: /* Shifts */
24241 gen_shift(ctx
, op1
, rd
, rs
, rt
);
/* SRLV/ROTRV share a function code; bit 6 selects ROTRV on R2+. */
24244 switch ((ctx
->opcode
>> 6) & 0x1f) {
24246 /* rotrv is decoded as srlv on non-R2 CPUs */
24247 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24252 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24255 generate_exception_end(ctx
, EXCP_RI
);
24259 case OPC_SLT
: /* Set on less than */
24261 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24263 case OPC_AND
: /* Logic*/
24267 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24270 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24272 case OPC_TGE
: /* Traps */
24278 check_insn(ctx
, ISA_MIPS2
);
24279 gen_trap(ctx
, op1
, rs
, rt
, -1);
/* LSA collides with the historical PMON entry point / R4010 selsl. */
24281 case OPC_LSA
: /* OPC_PMON */
24282 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24283 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24284 decode_opc_special_r6(env
, ctx
);
24286 /* Pmon entry point, also R4010 selsl */
24287 #ifdef MIPS_STRICT_STANDARD
24288 MIPS_INVAL("PMON / selsl");
24289 generate_exception_end(ctx
, EXCP_RI
);
24291 gen_helper_0e0i(pmon
, sa
);
24296 generate_exception_end(ctx
, EXCP_SYSCALL
);
24299 generate_exception_end(ctx
, EXCP_BREAK
);
/* SYNC: stype field in opcode bits 10:6. */
24302 check_insn(ctx
, ISA_MIPS2
);
24303 gen_sync(extract32(ctx
->opcode
, 6, 5));
24306 #if defined(TARGET_MIPS64)
24307 /* MIPS64 specific opcodes */
24312 check_insn(ctx
, ISA_MIPS3
);
24313 check_mips_64(ctx
);
24314 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
/* DSRL/DROTR discrimination, same scheme as the 32-bit pair. */
24317 switch ((ctx
->opcode
>> 21) & 0x1f) {
24319 /* drotr is decoded as dsrl on non-R2 CPUs */
24320 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24325 check_insn(ctx
, ISA_MIPS3
);
24326 check_mips_64(ctx
);
24327 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24330 generate_exception_end(ctx
, EXCP_RI
);
24335 switch ((ctx
->opcode
>> 21) & 0x1f) {
24337 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24338 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24343 check_insn(ctx
, ISA_MIPS3
);
24344 check_mips_64(ctx
);
24345 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24348 generate_exception_end(ctx
, EXCP_RI
);
24356 check_insn(ctx
, ISA_MIPS3
);
24357 check_mips_64(ctx
);
24358 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24362 check_insn(ctx
, ISA_MIPS3
);
24363 check_mips_64(ctx
);
24364 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24367 switch ((ctx
->opcode
>> 6) & 0x1f) {
24369 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24370 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24375 check_insn(ctx
, ISA_MIPS3
);
24376 check_mips_64(ctx
);
24377 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24380 generate_exception_end(ctx
, EXCP_RI
);
/* DLSA requires R6 or MSA support. */
24385 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24386 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24387 decode_opc_special_r6(env
, ctx
);
/* Fallback: dispatch remaining function codes by ISA family. */
24392 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24393 decode_opc_special_r6(env
, ctx
);
24394 } else if (ctx
->insn_flags
& INSN_R5900
) {
24395 decode_opc_special_tx79(env
, ctx
);
24397 decode_opc_special_legacy(env
, ctx
);
24403 #if defined(TARGET_MIPS64)
24407 * MMI (MultiMedia Interface) ASE instructions
24408 * ===========================================
24412 * MMI instructions category: data communication
24413 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24415 * PCPYH PEXCH PEXTLB PINTH PPACB PEXT5 PREVH
24416 * PCPYLD PEXCW PEXTLH PINTEH PPACH PPAC5 PROT3W
24417 * PCPYUD PEXEH PEXTLW PPACW
24426 * Parallel Copy Halfword
24428 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24429 * +-----------+---------+---------+---------+---------+-----------+
24430 * | MMI |0 0 0 0 0| rt | rd | PCPYH | MMI3 |
24431 * +-----------+---------+---------+---------+---------+-----------+
24433 static void gen_mmi_pcpyh(DisasContext
*ctx
)
24435 uint32_t pd
, rt
, rd
;
24438 opcode
= ctx
->opcode
;
24440 pd
= extract32(opcode
, 21, 5);
24441 rt
= extract32(opcode
, 16, 5);
24442 rd
= extract32(opcode
, 11, 5);
24444 if (unlikely(pd
!= 0)) {
24445 generate_exception_end(ctx
, EXCP_RI
);
24446 } else if (rd
== 0) {
24448 } else if (rt
== 0) {
24449 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24450 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24452 TCGv_i64 t0
= tcg_temp_new();
24453 TCGv_i64 t1
= tcg_temp_new();
24454 uint64_t mask
= (1ULL << 16) - 1;
24456 tcg_gen_andi_i64(t0
, cpu_gpr
[rt
], mask
);
24457 tcg_gen_movi_i64(t1
, 0);
24458 tcg_gen_or_i64(t1
, t0
, t1
);
24459 tcg_gen_shli_i64(t0
, t0
, 16);
24460 tcg_gen_or_i64(t1
, t0
, t1
);
24461 tcg_gen_shli_i64(t0
, t0
, 16);
24462 tcg_gen_or_i64(t1
, t0
, t1
);
24463 tcg_gen_shli_i64(t0
, t0
, 16);
24464 tcg_gen_or_i64(t1
, t0
, t1
);
24466 tcg_gen_mov_i64(cpu_gpr
[rd
], t1
);
24468 tcg_gen_andi_i64(t0
, cpu_mmr
[rt
], mask
);
24469 tcg_gen_movi_i64(t1
, 0);
24470 tcg_gen_or_i64(t1
, t0
, t1
);
24471 tcg_gen_shli_i64(t0
, t0
, 16);
24472 tcg_gen_or_i64(t1
, t0
, t1
);
24473 tcg_gen_shli_i64(t0
, t0
, 16);
24474 tcg_gen_or_i64(t1
, t0
, t1
);
24475 tcg_gen_shli_i64(t0
, t0
, 16);
24476 tcg_gen_or_i64(t1
, t0
, t1
);
24478 tcg_gen_mov_i64(cpu_mmr
[rd
], t1
);
24486 * PCPYLD rd, rs, rt
24488 * Parallel Copy Lower Doubleword
24490 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24491 * +-----------+---------+---------+---------+---------+-----------+
24492 * | MMI | rs | rt | rd | PCPYLD | MMI2 |
24493 * +-----------+---------+---------+---------+---------+-----------+
24495 static void gen_mmi_pcpyld(DisasContext
*ctx
)
24497 uint32_t rs
, rt
, rd
;
24500 opcode
= ctx
->opcode
;
24502 rs
= extract32(opcode
, 21, 5);
24503 rt
= extract32(opcode
, 16, 5);
24504 rd
= extract32(opcode
, 11, 5);
24510 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24512 tcg_gen_mov_i64(cpu_mmr
[rd
], cpu_gpr
[rs
]);
24515 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24518 tcg_gen_mov_i64(cpu_gpr
[rd
], cpu_gpr
[rt
]);
24525 * PCPYUD rd, rs, rt
24527 * Parallel Copy Upper Doubleword
24529 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24530 * +-----------+---------+---------+---------+---------+-----------+
24531 * | MMI | rs | rt | rd | PCPYUD | MMI3 |
24532 * +-----------+---------+---------+---------+---------+-----------+
24534 static void gen_mmi_pcpyud(DisasContext
*ctx
)
24536 uint32_t rs
, rt
, rd
;
24539 opcode
= ctx
->opcode
;
24541 rs
= extract32(opcode
, 21, 5);
24542 rt
= extract32(opcode
, 16, 5);
24543 rd
= extract32(opcode
, 11, 5);
24549 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24551 tcg_gen_mov_i64(cpu_gpr
[rd
], cpu_mmr
[rs
]);
24554 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24557 tcg_gen_mov_i64(cpu_mmr
[rd
], cpu_mmr
[rt
]);
24566 #if !defined(TARGET_MIPS64)
24568 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
24569 #define MXU_APTN1_A 0
24570 #define MXU_APTN1_S 1
24572 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
24573 #define MXU_APTN2_AA 0
24574 #define MXU_APTN2_AS 1
24575 #define MXU_APTN2_SA 2
24576 #define MXU_APTN2_SS 3
24578 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
24579 #define MXU_EPTN2_AA 0
24580 #define MXU_EPTN2_AS 1
24581 #define MXU_EPTN2_SA 2
24582 #define MXU_EPTN2_SS 3
24584 /* MXU operand getting pattern 'optn2' */
24585 #define MXU_OPTN2_PTN0 0
24586 #define MXU_OPTN2_PTN1 1
24587 #define MXU_OPTN2_PTN2 2
24588 #define MXU_OPTN2_PTN3 3
24589 /* alternative naming scheme for 'optn2' */
24590 #define MXU_OPTN2_WW 0
24591 #define MXU_OPTN2_LW 1
24592 #define MXU_OPTN2_HW 2
24593 #define MXU_OPTN2_XW 3
24595 /* MXU operand getting pattern 'optn3' */
24596 #define MXU_OPTN3_PTN0 0
24597 #define MXU_OPTN3_PTN1 1
24598 #define MXU_OPTN3_PTN2 2
24599 #define MXU_OPTN3_PTN3 3
24600 #define MXU_OPTN3_PTN4 4
24601 #define MXU_OPTN3_PTN5 5
24602 #define MXU_OPTN3_PTN6 6
24603 #define MXU_OPTN3_PTN7 7
24607 * S32I2M XRa, rb - Register move from GRF to XRF
24609 static void gen_mxu_s32i2m(DisasContext
*ctx
)
24614 t0
= tcg_temp_new();
24616 XRa
= extract32(ctx
->opcode
, 6, 5);
24617 Rb
= extract32(ctx
->opcode
, 16, 5);
24619 gen_load_gpr(t0
, Rb
);
24621 gen_store_mxu_gpr(t0
, XRa
);
24622 } else if (XRa
== 16) {
24623 gen_store_mxu_cr(t0
);
24630 * S32M2I XRa, rb - Register move from XRF to GRF
24632 static void gen_mxu_s32m2i(DisasContext
*ctx
)
24637 t0
= tcg_temp_new();
24639 XRa
= extract32(ctx
->opcode
, 6, 5);
24640 Rb
= extract32(ctx
->opcode
, 16, 5);
24643 gen_load_mxu_gpr(t0
, XRa
);
24644 } else if (XRa
== 16) {
24645 gen_load_mxu_cr(t0
);
24648 gen_store_gpr(t0
, Rb
);
24654 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
24656 static void gen_mxu_s8ldd(DisasContext
*ctx
)
24659 uint32_t XRa
, Rb
, s8
, optn3
;
24661 t0
= tcg_temp_new();
24662 t1
= tcg_temp_new();
24664 XRa
= extract32(ctx
->opcode
, 6, 4);
24665 s8
= extract32(ctx
->opcode
, 10, 8);
24666 optn3
= extract32(ctx
->opcode
, 18, 3);
24667 Rb
= extract32(ctx
->opcode
, 21, 5);
24669 gen_load_gpr(t0
, Rb
);
24670 tcg_gen_addi_tl(t0
, t0
, (int8_t)s8
);
24673 /* XRa[7:0] = tmp8 */
24674 case MXU_OPTN3_PTN0
:
24675 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24676 gen_load_mxu_gpr(t0
, XRa
);
24677 tcg_gen_deposit_tl(t0
, t0
, t1
, 0, 8);
24679 /* XRa[15:8] = tmp8 */
24680 case MXU_OPTN3_PTN1
:
24681 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24682 gen_load_mxu_gpr(t0
, XRa
);
24683 tcg_gen_deposit_tl(t0
, t0
, t1
, 8, 8);
24685 /* XRa[23:16] = tmp8 */
24686 case MXU_OPTN3_PTN2
:
24687 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24688 gen_load_mxu_gpr(t0
, XRa
);
24689 tcg_gen_deposit_tl(t0
, t0
, t1
, 16, 8);
24691 /* XRa[31:24] = tmp8 */
24692 case MXU_OPTN3_PTN3
:
24693 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24694 gen_load_mxu_gpr(t0
, XRa
);
24695 tcg_gen_deposit_tl(t0
, t0
, t1
, 24, 8);
24697 /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
24698 case MXU_OPTN3_PTN4
:
24699 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24700 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24702 /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
24703 case MXU_OPTN3_PTN5
:
24704 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24705 tcg_gen_shli_tl(t1
, t1
, 8);
24706 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24708 /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
24709 case MXU_OPTN3_PTN6
:
24710 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
24711 tcg_gen_mov_tl(t0
, t1
);
24712 tcg_gen_andi_tl(t0
, t0
, 0xFF00FFFF);
24713 tcg_gen_shli_tl(t1
, t1
, 16);
24714 tcg_gen_or_tl(t0
, t0
, t1
);
24716 /* XRa = {tmp8, tmp8, tmp8, tmp8} */
24717 case MXU_OPTN3_PTN7
:
24718 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24719 tcg_gen_deposit_tl(t1
, t1
, t1
, 8, 8);
24720 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24724 gen_store_mxu_gpr(t0
, XRa
);
24731 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
24733 static void gen_mxu_d16mul(DisasContext
*ctx
)
24735 TCGv t0
, t1
, t2
, t3
;
24736 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
;
24738 t0
= tcg_temp_new();
24739 t1
= tcg_temp_new();
24740 t2
= tcg_temp_new();
24741 t3
= tcg_temp_new();
24743 XRa
= extract32(ctx
->opcode
, 6, 4);
24744 XRb
= extract32(ctx
->opcode
, 10, 4);
24745 XRc
= extract32(ctx
->opcode
, 14, 4);
24746 XRd
= extract32(ctx
->opcode
, 18, 4);
24747 optn2
= extract32(ctx
->opcode
, 22, 2);
24749 gen_load_mxu_gpr(t1
, XRb
);
24750 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24751 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24752 gen_load_mxu_gpr(t3
, XRc
);
24753 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24754 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24757 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24758 tcg_gen_mul_tl(t3
, t1
, t3
);
24759 tcg_gen_mul_tl(t2
, t0
, t2
);
24761 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24762 tcg_gen_mul_tl(t3
, t0
, t3
);
24763 tcg_gen_mul_tl(t2
, t0
, t2
);
24765 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24766 tcg_gen_mul_tl(t3
, t1
, t3
);
24767 tcg_gen_mul_tl(t2
, t1
, t2
);
24769 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24770 tcg_gen_mul_tl(t3
, t0
, t3
);
24771 tcg_gen_mul_tl(t2
, t1
, t2
);
24774 gen_store_mxu_gpr(t3
, XRa
);
24775 gen_store_mxu_gpr(t2
, XRd
);
24784 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
24787 static void gen_mxu_d16mac(DisasContext
*ctx
)
24789 TCGv t0
, t1
, t2
, t3
;
24790 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
, aptn2
;
24792 t0
= tcg_temp_new();
24793 t1
= tcg_temp_new();
24794 t2
= tcg_temp_new();
24795 t3
= tcg_temp_new();
24797 XRa
= extract32(ctx
->opcode
, 6, 4);
24798 XRb
= extract32(ctx
->opcode
, 10, 4);
24799 XRc
= extract32(ctx
->opcode
, 14, 4);
24800 XRd
= extract32(ctx
->opcode
, 18, 4);
24801 optn2
= extract32(ctx
->opcode
, 22, 2);
24802 aptn2
= extract32(ctx
->opcode
, 24, 2);
24804 gen_load_mxu_gpr(t1
, XRb
);
24805 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24806 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24808 gen_load_mxu_gpr(t3
, XRc
);
24809 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24810 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24813 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24814 tcg_gen_mul_tl(t3
, t1
, t3
);
24815 tcg_gen_mul_tl(t2
, t0
, t2
);
24817 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24818 tcg_gen_mul_tl(t3
, t0
, t3
);
24819 tcg_gen_mul_tl(t2
, t0
, t2
);
24821 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24822 tcg_gen_mul_tl(t3
, t1
, t3
);
24823 tcg_gen_mul_tl(t2
, t1
, t2
);
24825 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24826 tcg_gen_mul_tl(t3
, t0
, t3
);
24827 tcg_gen_mul_tl(t2
, t1
, t2
);
24830 gen_load_mxu_gpr(t0
, XRa
);
24831 gen_load_mxu_gpr(t1
, XRd
);
24835 tcg_gen_add_tl(t3
, t0
, t3
);
24836 tcg_gen_add_tl(t2
, t1
, t2
);
24839 tcg_gen_add_tl(t3
, t0
, t3
);
24840 tcg_gen_sub_tl(t2
, t1
, t2
);
24843 tcg_gen_sub_tl(t3
, t0
, t3
);
24844 tcg_gen_add_tl(t2
, t1
, t2
);
24847 tcg_gen_sub_tl(t3
, t0
, t3
);
24848 tcg_gen_sub_tl(t2
, t1
, t2
);
24851 gen_store_mxu_gpr(t3
, XRa
);
24852 gen_store_mxu_gpr(t2
, XRd
);
24861 * Q8MUL XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
24862 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
24864 static void gen_mxu_q8mul_q8mulsu(DisasContext
*ctx
)
24866 TCGv t0
, t1
, t2
, t3
, t4
, t5
, t6
, t7
;
24867 uint32_t XRa
, XRb
, XRc
, XRd
, sel
;
24869 t0
= tcg_temp_new();
24870 t1
= tcg_temp_new();
24871 t2
= tcg_temp_new();
24872 t3
= tcg_temp_new();
24873 t4
= tcg_temp_new();
24874 t5
= tcg_temp_new();
24875 t6
= tcg_temp_new();
24876 t7
= tcg_temp_new();
24878 XRa
= extract32(ctx
->opcode
, 6, 4);
24879 XRb
= extract32(ctx
->opcode
, 10, 4);
24880 XRc
= extract32(ctx
->opcode
, 14, 4);
24881 XRd
= extract32(ctx
->opcode
, 18, 4);
24882 sel
= extract32(ctx
->opcode
, 22, 2);
24884 gen_load_mxu_gpr(t3
, XRb
);
24885 gen_load_mxu_gpr(t7
, XRc
);
24889 tcg_gen_ext8s_tl(t0
, t3
);
24890 tcg_gen_shri_tl(t3
, t3
, 8);
24891 tcg_gen_ext8s_tl(t1
, t3
);
24892 tcg_gen_shri_tl(t3
, t3
, 8);
24893 tcg_gen_ext8s_tl(t2
, t3
);
24894 tcg_gen_shri_tl(t3
, t3
, 8);
24895 tcg_gen_ext8s_tl(t3
, t3
);
24898 tcg_gen_ext8u_tl(t0
, t3
);
24899 tcg_gen_shri_tl(t3
, t3
, 8);
24900 tcg_gen_ext8u_tl(t1
, t3
);
24901 tcg_gen_shri_tl(t3
, t3
, 8);
24902 tcg_gen_ext8u_tl(t2
, t3
);
24903 tcg_gen_shri_tl(t3
, t3
, 8);
24904 tcg_gen_ext8u_tl(t3
, t3
);
24907 tcg_gen_ext8u_tl(t4
, t7
);
24908 tcg_gen_shri_tl(t7
, t7
, 8);
24909 tcg_gen_ext8u_tl(t5
, t7
);
24910 tcg_gen_shri_tl(t7
, t7
, 8);
24911 tcg_gen_ext8u_tl(t6
, t7
);
24912 tcg_gen_shri_tl(t7
, t7
, 8);
24913 tcg_gen_ext8u_tl(t7
, t7
);
24915 tcg_gen_mul_tl(t0
, t0
, t4
);
24916 tcg_gen_mul_tl(t1
, t1
, t5
);
24917 tcg_gen_mul_tl(t2
, t2
, t6
);
24918 tcg_gen_mul_tl(t3
, t3
, t7
);
24920 tcg_gen_andi_tl(t0
, t0
, 0xFFFF);
24921 tcg_gen_andi_tl(t1
, t1
, 0xFFFF);
24922 tcg_gen_andi_tl(t2
, t2
, 0xFFFF);
24923 tcg_gen_andi_tl(t3
, t3
, 0xFFFF);
24925 tcg_gen_shli_tl(t1
, t1
, 16);
24926 tcg_gen_shli_tl(t3
, t3
, 16);
24928 tcg_gen_or_tl(t0
, t0
, t1
);
24929 tcg_gen_or_tl(t1
, t2
, t3
);
24931 gen_store_mxu_gpr(t0
, XRd
);
24932 gen_store_mxu_gpr(t1
, XRa
);
24945 * S32LDD XRa, Rb, S12 - Load a word from memory to XRF
24946 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
24948 static void gen_mxu_s32ldd_s32lddr(DisasContext
*ctx
)
24951 uint32_t XRa
, Rb
, s12
, sel
;
24953 t0
= tcg_temp_new();
24954 t1
= tcg_temp_new();
24956 XRa
= extract32(ctx
->opcode
, 6, 4);
24957 s12
= extract32(ctx
->opcode
, 10, 10);
24958 sel
= extract32(ctx
->opcode
, 20, 1);
24959 Rb
= extract32(ctx
->opcode
, 21, 5);
24961 gen_load_gpr(t0
, Rb
);
24963 tcg_gen_movi_tl(t1
, s12
);
24964 tcg_gen_shli_tl(t1
, t1
, 2);
24966 tcg_gen_ori_tl(t1
, t1
, 0xFFFFF000);
24968 tcg_gen_add_tl(t1
, t0
, t1
);
24969 tcg_gen_qemu_ld_tl(t1
, t1
, ctx
->mem_idx
, MO_SL
);
24973 tcg_gen_bswap32_tl(t1
, t1
);
24975 gen_store_mxu_gpr(t1
, XRa
);
24983 * MXU instruction category: logic
24984 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24986 * S32NOR S32AND S32OR S32XOR
24990 * S32NOR XRa, XRb, XRc
24991 * Update XRa with the result of logical bitwise 'nor' operation
24992 * applied to the content of XRb and XRc.
24994 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24995 * +-----------+---------+-----+-------+-------+-------+-----------+
24996 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
24997 * +-----------+---------+-----+-------+-------+-------+-----------+
24999 static void gen_mxu_S32NOR(DisasContext
*ctx
)
25001 uint32_t pad
, XRc
, XRb
, XRa
;
25003 pad
= extract32(ctx
->opcode
, 21, 5);
25004 XRc
= extract32(ctx
->opcode
, 14, 4);
25005 XRb
= extract32(ctx
->opcode
, 10, 4);
25006 XRa
= extract32(ctx
->opcode
, 6, 4);
25008 if (unlikely(pad
!= 0)) {
25009 /* opcode padding incorrect -> do nothing */
25010 } else if (unlikely(XRa
== 0)) {
25011 /* destination is zero register -> do nothing */
25012 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25013 /* both operands zero registers -> just set destination to all 1s */
25014 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0xFFFFFFFF);
25015 } else if (unlikely(XRb
== 0)) {
25016 /* XRb zero register -> just set destination to the negation of XRc */
25017 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25018 } else if (unlikely(XRc
== 0)) {
25019 /* XRa zero register -> just set destination to the negation of XRb */
25020 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25021 } else if (unlikely(XRb
== XRc
)) {
25022 /* both operands same -> just set destination to the negation of XRb */
25023 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25025 /* the most general case */
25026 tcg_gen_nor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25031 * S32AND XRa, XRb, XRc
25032 * Update XRa with the result of logical bitwise 'and' operation
25033 * applied to the content of XRb and XRc.
25035 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25036 * +-----------+---------+-----+-------+-------+-------+-----------+
25037 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25038 * +-----------+---------+-----+-------+-------+-------+-----------+
25040 static void gen_mxu_S32AND(DisasContext
*ctx
)
25042 uint32_t pad
, XRc
, XRb
, XRa
;
25044 pad
= extract32(ctx
->opcode
, 21, 5);
25045 XRc
= extract32(ctx
->opcode
, 14, 4);
25046 XRb
= extract32(ctx
->opcode
, 10, 4);
25047 XRa
= extract32(ctx
->opcode
, 6, 4);
25049 if (unlikely(pad
!= 0)) {
25050 /* opcode padding incorrect -> do nothing */
25051 } else if (unlikely(XRa
== 0)) {
25052 /* destination is zero register -> do nothing */
25053 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25054 /* one of operands zero register -> just set destination to all 0s */
25055 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25056 } else if (unlikely(XRb
== XRc
)) {
25057 /* both operands same -> just set destination to one of them */
25058 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25060 /* the most general case */
25061 tcg_gen_and_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25066 * S32OR XRa, XRb, XRc
25067 * Update XRa with the result of logical bitwise 'or' operation
25068 * applied to the content of XRb and XRc.
25070 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25071 * +-----------+---------+-----+-------+-------+-------+-----------+
25072 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25073 * +-----------+---------+-----+-------+-------+-------+-----------+
25075 static void gen_mxu_S32OR(DisasContext
*ctx
)
25077 uint32_t pad
, XRc
, XRb
, XRa
;
25079 pad
= extract32(ctx
->opcode
, 21, 5);
25080 XRc
= extract32(ctx
->opcode
, 14, 4);
25081 XRb
= extract32(ctx
->opcode
, 10, 4);
25082 XRa
= extract32(ctx
->opcode
, 6, 4);
25084 if (unlikely(pad
!= 0)) {
25085 /* opcode padding incorrect -> do nothing */
25086 } else if (unlikely(XRa
== 0)) {
25087 /* destination is zero register -> do nothing */
25088 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25089 /* both operands zero registers -> just set destination to all 0s */
25090 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25091 } else if (unlikely(XRb
== 0)) {
25092 /* XRb zero register -> just set destination to the content of XRc */
25093 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25094 } else if (unlikely(XRc
== 0)) {
25095 /* XRc zero register -> just set destination to the content of XRb */
25096 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25097 } else if (unlikely(XRb
== XRc
)) {
25098 /* both operands same -> just set destination to one of them */
25099 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25101 /* the most general case */
25102 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25107 * S32XOR XRa, XRb, XRc
25108 * Update XRa with the result of logical bitwise 'xor' operation
25109 * applied to the content of XRb and XRc.
25111 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25112 * +-----------+---------+-----+-------+-------+-------+-----------+
25113 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25114 * +-----------+---------+-----+-------+-------+-------+-----------+
25116 static void gen_mxu_S32XOR(DisasContext
*ctx
)
25118 uint32_t pad
, XRc
, XRb
, XRa
;
25120 pad
= extract32(ctx
->opcode
, 21, 5);
25121 XRc
= extract32(ctx
->opcode
, 14, 4);
25122 XRb
= extract32(ctx
->opcode
, 10, 4);
25123 XRa
= extract32(ctx
->opcode
, 6, 4);
25125 if (unlikely(pad
!= 0)) {
25126 /* opcode padding incorrect -> do nothing */
25127 } else if (unlikely(XRa
== 0)) {
25128 /* destination is zero register -> do nothing */
25129 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25130 /* both operands zero registers -> just set destination to all 0s */
25131 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25132 } else if (unlikely(XRb
== 0)) {
25133 /* XRb zero register -> just set destination to the content of XRc */
25134 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25135 } else if (unlikely(XRc
== 0)) {
25136 /* XRc zero register -> just set destination to the content of XRb */
25137 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25138 } else if (unlikely(XRb
== XRc
)) {
25139 /* both operands same -> just set destination to all 0s */
25140 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25142 /* the most general case */
25143 tcg_gen_xor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25149 * MXU instruction category max/min
25150 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25152 * S32MAX D16MAX Q8MAX
25153 * S32MIN D16MIN Q8MIN
25157 * S32MAX XRa, XRb, XRc
25158 * Update XRa with the maximum of signed 32-bit integers contained
25161 * S32MIN XRa, XRb, XRc
25162 * Update XRa with the minimum of signed 32-bit integers contained
25165 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25166 * +-----------+---------+-----+-------+-------+-------+-----------+
25167 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25168 * +-----------+---------+-----+-------+-------+-------+-----------+
25170 static void gen_mxu_S32MAX_S32MIN(DisasContext
*ctx
)
25172 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25174 pad
= extract32(ctx
->opcode
, 21, 5);
25175 opc
= extract32(ctx
->opcode
, 18, 3);
25176 XRc
= extract32(ctx
->opcode
, 14, 4);
25177 XRb
= extract32(ctx
->opcode
, 10, 4);
25178 XRa
= extract32(ctx
->opcode
, 6, 4);
25180 if (unlikely(pad
!= 0)) {
25181 /* opcode padding incorrect -> do nothing */
25182 } else if (unlikely(XRa
== 0)) {
25183 /* destination is zero register -> do nothing */
25184 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25185 /* both operands zero registers -> just set destination to zero */
25186 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25187 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25188 /* exactly one operand is zero register - find which one is not...*/
25189 uint32_t XRx
= XRb
? XRb
: XRc
;
25190 /* ...and do max/min operation with one operand 0 */
25191 if (opc
== OPC_MXU_S32MAX
) {
25192 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25194 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25196 } else if (unlikely(XRb
== XRc
)) {
25197 /* both operands same -> just set destination to one of them */
25198 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25200 /* the most general case */
25201 if (opc
== OPC_MXU_S32MAX
) {
25202 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25205 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25213 * Update XRa with the 16-bit-wise maximums of signed integers
25214 * contained in XRb and XRc.
25217 * Update XRa with the 16-bit-wise minimums of signed integers
25218 * contained in XRb and XRc.
25220 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25221 * +-----------+---------+-----+-------+-------+-------+-----------+
25222 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25223 * +-----------+---------+-----+-------+-------+-------+-----------+
25225 static void gen_mxu_D16MAX_D16MIN(DisasContext
*ctx
)
25227 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25229 pad
= extract32(ctx
->opcode
, 21, 5);
25230 opc
= extract32(ctx
->opcode
, 18, 3);
25231 XRc
= extract32(ctx
->opcode
, 14, 4);
25232 XRb
= extract32(ctx
->opcode
, 10, 4);
25233 XRa
= extract32(ctx
->opcode
, 6, 4);
25235 if (unlikely(pad
!= 0)) {
25236 /* opcode padding incorrect -> do nothing */
25237 } else if (unlikely(XRc
== 0)) {
25238 /* destination is zero register -> do nothing */
25239 } else if (unlikely((XRb
== 0) && (XRa
== 0))) {
25240 /* both operands zero registers -> just set destination to zero */
25241 tcg_gen_movi_i32(mxu_gpr
[XRc
- 1], 0);
25242 } else if (unlikely((XRb
== 0) || (XRa
== 0))) {
25243 /* exactly one operand is zero register - find which one is not...*/
25244 uint32_t XRx
= XRb
? XRb
: XRc
;
25245 /* ...and do half-word-wise max/min with one operand 0 */
25246 TCGv_i32 t0
= tcg_temp_new();
25247 TCGv_i32 t1
= tcg_const_i32(0);
25249 /* the left half-word first */
25250 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFFFF0000);
25251 if (opc
== OPC_MXU_D16MAX
) {
25252 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25254 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25257 /* the right half-word */
25258 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0x0000FFFF);
25259 /* move half-words to the leftmost position */
25260 tcg_gen_shli_i32(t0
, t0
, 16);
25261 /* t0 will be max/min of t0 and t1 */
25262 if (opc
== OPC_MXU_D16MAX
) {
25263 tcg_gen_smax_i32(t0
, t0
, t1
);
25265 tcg_gen_smin_i32(t0
, t0
, t1
);
25267 /* return resulting half-words to its original position */
25268 tcg_gen_shri_i32(t0
, t0
, 16);
25269 /* finaly update the destination */
25270 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25274 } else if (unlikely(XRb
== XRc
)) {
25275 /* both operands same -> just set destination to one of them */
25276 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25278 /* the most general case */
25279 TCGv_i32 t0
= tcg_temp_new();
25280 TCGv_i32 t1
= tcg_temp_new();
25282 /* the left half-word first */
25283 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFFFF0000);
25284 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25285 if (opc
== OPC_MXU_D16MAX
) {
25286 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25288 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25291 /* the right half-word */
25292 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25293 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0x0000FFFF);
25294 /* move half-words to the leftmost position */
25295 tcg_gen_shli_i32(t0
, t0
, 16);
25296 tcg_gen_shli_i32(t1
, t1
, 16);
25297 /* t0 will be max/min of t0 and t1 */
25298 if (opc
== OPC_MXU_D16MAX
) {
25299 tcg_gen_smax_i32(t0
, t0
, t1
);
25301 tcg_gen_smin_i32(t0
, t0
, t1
);
25303 /* return resulting half-words to its original position */
25304 tcg_gen_shri_i32(t0
, t0
, 16);
25305 /* finaly update the destination */
25306 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25315 * Update XRa with the 8-bit-wise maximums of signed integers
25316 * contained in XRb and XRc.
25319 * Update XRa with the 8-bit-wise minimums of signed integers
25320 * contained in XRb and XRc.
25322 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25323 * +-----------+---------+-----+-------+-------+-------+-----------+
25324 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25325 * +-----------+---------+-----+-------+-------+-------+-----------+
25327 static void gen_mxu_Q8MAX_Q8MIN(DisasContext
*ctx
)
25329 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25331 pad
= extract32(ctx
->opcode
, 21, 5);
25332 opc
= extract32(ctx
->opcode
, 18, 3);
25333 XRc
= extract32(ctx
->opcode
, 14, 4);
25334 XRb
= extract32(ctx
->opcode
, 10, 4);
25335 XRa
= extract32(ctx
->opcode
, 6, 4);
25337 if (unlikely(pad
!= 0)) {
25338 /* opcode padding incorrect -> do nothing */
25339 } else if (unlikely(XRa
== 0)) {
25340 /* destination is zero register -> do nothing */
25341 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25342 /* both operands zero registers -> just set destination to zero */
25343 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25344 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25345 /* exactly one operand is zero register - make it be the first...*/
25346 uint32_t XRx
= XRb
? XRb
: XRc
;
25347 /* ...and do byte-wise max/min with one operand 0 */
25348 TCGv_i32 t0
= tcg_temp_new();
25349 TCGv_i32 t1
= tcg_const_i32(0);
25352 /* the leftmost byte (byte 3) first */
25353 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF000000);
25354 if (opc
== OPC_MXU_Q8MAX
) {
25355 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25357 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25360 /* bytes 2, 1, 0 */
25361 for (i
= 2; i
>= 0; i
--) {
25362 /* extract the byte */
25363 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF << (8 * i
));
25364 /* move the byte to the leftmost position */
25365 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25366 /* t0 will be max/min of t0 and t1 */
25367 if (opc
== OPC_MXU_Q8MAX
) {
25368 tcg_gen_smax_i32(t0
, t0
, t1
);
25370 tcg_gen_smin_i32(t0
, t0
, t1
);
25372 /* return resulting byte to its original position */
25373 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25374 /* finaly update the destination */
25375 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25380 } else if (unlikely(XRb
== XRc
)) {
25381 /* both operands same -> just set destination to one of them */
25382 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25384 /* the most general case */
25385 TCGv_i32 t0
= tcg_temp_new();
25386 TCGv_i32 t1
= tcg_temp_new();
25389 /* the leftmost bytes (bytes 3) first */
25390 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF000000);
25391 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25392 if (opc
== OPC_MXU_Q8MAX
) {
25393 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25395 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25398 /* bytes 2, 1, 0 */
25399 for (i
= 2; i
>= 0; i
--) {
25400 /* extract corresponding bytes */
25401 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF << (8 * i
));
25402 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF << (8 * i
));
25403 /* move the bytes to the leftmost position */
25404 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25405 tcg_gen_shli_i32(t1
, t1
, 8 * (3 - i
));
25406 /* t0 will be max/min of t0 and t1 */
25407 if (opc
== OPC_MXU_Q8MAX
) {
25408 tcg_gen_smax_i32(t0
, t0
, t1
);
25410 tcg_gen_smin_i32(t0
, t0
, t1
);
25412 /* return resulting byte to its original position */
25413 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25414 /* finaly update the destination */
25415 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25425 * MXU instruction category: align
25426 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25432 * S32ALNI XRc, XRb, XRa, optn3
25433 * Arrange bytes from XRb and XRc according to one of five sets of
25434 * rules determined by optn3, and place the result in XRa.
25436 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25437 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25438 * | SPECIAL2 |optn3|0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25439 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25442 static void gen_mxu_S32ALNI(DisasContext
*ctx
)
25444 uint32_t optn3
, pad
, XRc
, XRb
, XRa
;
25446 optn3
= extract32(ctx
->opcode
, 23, 3);
25447 pad
= extract32(ctx
->opcode
, 21, 2);
25448 XRc
= extract32(ctx
->opcode
, 14, 4);
25449 XRb
= extract32(ctx
->opcode
, 10, 4);
25450 XRa
= extract32(ctx
->opcode
, 6, 4);
25452 if (unlikely(pad
!= 0)) {
25453 /* opcode padding incorrect -> do nothing */
25454 } else if (unlikely(XRa
== 0)) {
25455 /* destination is zero register -> do nothing */
25456 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25457 /* both operands zero registers -> just set destination to all 0s */
25458 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25459 } else if (unlikely(XRb
== 0)) {
25460 /* XRb zero register -> just appropriatelly shift XRc into XRa */
25462 case MXU_OPTN3_PTN0
:
25463 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25465 case MXU_OPTN3_PTN1
:
25466 case MXU_OPTN3_PTN2
:
25467 case MXU_OPTN3_PTN3
:
25468 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1],
25471 case MXU_OPTN3_PTN4
:
25472 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25475 } else if (unlikely(XRc
== 0)) {
25476 /* XRc zero register -> just appropriatelly shift XRb into XRa */
25478 case MXU_OPTN3_PTN0
:
25479 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25481 case MXU_OPTN3_PTN1
:
25482 case MXU_OPTN3_PTN2
:
25483 case MXU_OPTN3_PTN3
:
25484 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25486 case MXU_OPTN3_PTN4
:
25487 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25490 } else if (unlikely(XRb
== XRc
)) {
25491 /* both operands same -> just rotation or moving from any of them */
25493 case MXU_OPTN3_PTN0
:
25494 case MXU_OPTN3_PTN4
:
25495 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25497 case MXU_OPTN3_PTN1
:
25498 case MXU_OPTN3_PTN2
:
25499 case MXU_OPTN3_PTN3
:
25500 tcg_gen_rotli_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25504 /* the most general case */
25506 case MXU_OPTN3_PTN0
:
25510 /* +---------------+ */
25511 /* | A B C D | E F G H */
25512 /* +-------+-------+ */
25517 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25520 case MXU_OPTN3_PTN1
:
25524 /* +-------------------+ */
25525 /* A | B C D E | F G H */
25526 /* +---------+---------+ */
25531 TCGv_i32 t0
= tcg_temp_new();
25532 TCGv_i32 t1
= tcg_temp_new();
25534 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x00FFFFFF);
25535 tcg_gen_shli_i32(t0
, t0
, 8);
25537 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25538 tcg_gen_shri_i32(t1
, t1
, 24);
25540 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25546 case MXU_OPTN3_PTN2
:
25550 /* +-------------------+ */
25551 /* A B | C D E F | G H */
25552 /* +---------+---------+ */
25557 TCGv_i32 t0
= tcg_temp_new();
25558 TCGv_i32 t1
= tcg_temp_new();
25560 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25561 tcg_gen_shli_i32(t0
, t0
, 16);
25563 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25564 tcg_gen_shri_i32(t1
, t1
, 16);
25566 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25572 case MXU_OPTN3_PTN3
:
25576 /* +-------------------+ */
25577 /* A B C | D E F G | H */
25578 /* +---------+---------+ */
25583 TCGv_i32 t0
= tcg_temp_new();
25584 TCGv_i32 t1
= tcg_temp_new();
25586 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x000000FF);
25587 tcg_gen_shli_i32(t0
, t0
, 24);
25589 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFFFF00);
25590 tcg_gen_shri_i32(t1
, t1
, 8);
25592 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25598 case MXU_OPTN3_PTN4
:
25602 /* +---------------+ */
25603 /* A B C D | E F G H | */
25604 /* +-------+-------+ */
25609 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25618 * Decoding engine for MXU
25619 * =======================
25624 * Decode MXU pool00
25626 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25627 * +-----------+---------+-----+-------+-------+-------+-----------+
25628 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL00|
25629 * +-----------+---------+-----+-------+-------+-------+-----------+
25632 static void decode_opc_mxu__pool00(CPUMIPSState
*env
, DisasContext
*ctx
)
25634 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25637 case OPC_MXU_S32MAX
:
25638 case OPC_MXU_S32MIN
:
25639 gen_mxu_S32MAX_S32MIN(ctx
);
25641 case OPC_MXU_D16MAX
:
25642 case OPC_MXU_D16MIN
:
25643 gen_mxu_D16MAX_D16MIN(ctx
);
25645 case OPC_MXU_Q8MAX
:
25646 case OPC_MXU_Q8MIN
:
25647 gen_mxu_Q8MAX_Q8MIN(ctx
);
25649 case OPC_MXU_Q8SLT
:
25650 /* TODO: Implement emulation of Q8SLT instruction. */
25651 MIPS_INVAL("OPC_MXU_Q8SLT");
25652 generate_exception_end(ctx
, EXCP_RI
);
25654 case OPC_MXU_Q8SLTU
:
25655 /* TODO: Implement emulation of Q8SLTU instruction. */
25656 MIPS_INVAL("OPC_MXU_Q8SLTU");
25657 generate_exception_end(ctx
, EXCP_RI
);
25660 MIPS_INVAL("decode_opc_mxu");
25661 generate_exception_end(ctx
, EXCP_RI
);
25668 * Decode MXU pool01
25670 * S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
25671 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25672 * +-----------+---------+-----+-------+-------+-------+-----------+
25673 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25674 * +-----------+---------+-----+-------+-------+-------+-----------+
25677 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25678 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25679 * | SPECIAL2 |en2|0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25680 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25683 static void decode_opc_mxu__pool01(CPUMIPSState
*env
, DisasContext
*ctx
)
25685 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25688 case OPC_MXU_S32SLT
:
25689 /* TODO: Implement emulation of S32SLT instruction. */
25690 MIPS_INVAL("OPC_MXU_S32SLT");
25691 generate_exception_end(ctx
, EXCP_RI
);
25693 case OPC_MXU_D16SLT
:
25694 /* TODO: Implement emulation of D16SLT instruction. */
25695 MIPS_INVAL("OPC_MXU_D16SLT");
25696 generate_exception_end(ctx
, EXCP_RI
);
25698 case OPC_MXU_D16AVG
:
25699 /* TODO: Implement emulation of D16AVG instruction. */
25700 MIPS_INVAL("OPC_MXU_D16AVG");
25701 generate_exception_end(ctx
, EXCP_RI
);
25703 case OPC_MXU_D16AVGR
:
25704 /* TODO: Implement emulation of D16AVGR instruction. */
25705 MIPS_INVAL("OPC_MXU_D16AVGR");
25706 generate_exception_end(ctx
, EXCP_RI
);
25708 case OPC_MXU_Q8AVG
:
25709 /* TODO: Implement emulation of Q8AVG instruction. */
25710 MIPS_INVAL("OPC_MXU_Q8AVG");
25711 generate_exception_end(ctx
, EXCP_RI
);
25713 case OPC_MXU_Q8AVGR
:
25714 /* TODO: Implement emulation of Q8AVGR instruction. */
25715 MIPS_INVAL("OPC_MXU_Q8AVGR");
25716 generate_exception_end(ctx
, EXCP_RI
);
25718 case OPC_MXU_Q8ADD
:
25719 /* TODO: Implement emulation of Q8ADD instruction. */
25720 MIPS_INVAL("OPC_MXU_Q8ADD");
25721 generate_exception_end(ctx
, EXCP_RI
);
25724 MIPS_INVAL("decode_opc_mxu");
25725 generate_exception_end(ctx
, EXCP_RI
);
25732 * Decode MXU pool02
25734 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25735 * +-----------+---------+-----+-------+-------+-------+-----------+
25736 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL02|
25737 * +-----------+---------+-----+-------+-------+-------+-----------+
25740 static void decode_opc_mxu__pool02(CPUMIPSState
*env
, DisasContext
*ctx
)
25742 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25745 case OPC_MXU_S32CPS
:
25746 /* TODO: Implement emulation of S32CPS instruction. */
25747 MIPS_INVAL("OPC_MXU_S32CPS");
25748 generate_exception_end(ctx
, EXCP_RI
);
25750 case OPC_MXU_D16CPS
:
25751 /* TODO: Implement emulation of D16CPS instruction. */
25752 MIPS_INVAL("OPC_MXU_D16CPS");
25753 generate_exception_end(ctx
, EXCP_RI
);
25755 case OPC_MXU_Q8ABD
:
25756 /* TODO: Implement emulation of Q8ABD instruction. */
25757 MIPS_INVAL("OPC_MXU_Q8ABD");
25758 generate_exception_end(ctx
, EXCP_RI
);
25760 case OPC_MXU_Q16SAT
:
25761 /* TODO: Implement emulation of Q16SAT instruction. */
25762 MIPS_INVAL("OPC_MXU_Q16SAT");
25763 generate_exception_end(ctx
, EXCP_RI
);
25766 MIPS_INVAL("decode_opc_mxu");
25767 generate_exception_end(ctx
, EXCP_RI
);
25774 * Decode MXU pool03
25777 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25778 * +-----------+---+---+-------+-------+-------+-------+-----------+
25779 * | SPECIAL2 |x x|on2|0 0 0 0| XRc | XRb | XRa |MXU__POOL03|
25780 * +-----------+---+---+-------+-------+-------+-------+-----------+
25783 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25784 * +-----------+---+---+-------+-------+-------+-------+-----------+
25785 * | SPECIAL2 |x x|on2| Xd | XRc | XRb | XRa |MXU__POOL03|
25786 * +-----------+---+---+-------+-------+-------+-------+-----------+
25789 static void decode_opc_mxu__pool03(CPUMIPSState
*env
, DisasContext
*ctx
)
25791 uint32_t opcode
= extract32(ctx
->opcode
, 24, 2);
25794 case OPC_MXU_D16MULF
:
25795 /* TODO: Implement emulation of D16MULF instruction. */
25796 MIPS_INVAL("OPC_MXU_D16MULF");
25797 generate_exception_end(ctx
, EXCP_RI
);
25799 case OPC_MXU_D16MULE
:
25800 /* TODO: Implement emulation of D16MULE instruction. */
25801 MIPS_INVAL("OPC_MXU_D16MULE");
25802 generate_exception_end(ctx
, EXCP_RI
);
25805 MIPS_INVAL("decode_opc_mxu");
25806 generate_exception_end(ctx
, EXCP_RI
);
25813 * Decode MXU pool04
25815 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25816 * +-----------+---------+-+-------------------+-------+-----------+
25817 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL04|
25818 * +-----------+---------+-+-------------------+-------+-----------+
25821 static void decode_opc_mxu__pool04(CPUMIPSState
*env
, DisasContext
*ctx
)
25823 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25826 case OPC_MXU_S32LDD
:
25827 case OPC_MXU_S32LDDR
:
25828 gen_mxu_s32ldd_s32lddr(ctx
);
25831 MIPS_INVAL("decode_opc_mxu");
25832 generate_exception_end(ctx
, EXCP_RI
);
25839 * Decode MXU pool05
25841 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25842 * +-----------+---------+-+-------------------+-------+-----------+
25843 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL05|
25844 * +-----------+---------+-+-------------------+-------+-----------+
25847 static void decode_opc_mxu__pool05(CPUMIPSState
*env
, DisasContext
*ctx
)
25849 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25852 case OPC_MXU_S32STD
:
25853 /* TODO: Implement emulation of S32STD instruction. */
25854 MIPS_INVAL("OPC_MXU_S32STD");
25855 generate_exception_end(ctx
, EXCP_RI
);
25857 case OPC_MXU_S32STDR
:
25858 /* TODO: Implement emulation of S32STDR instruction. */
25859 MIPS_INVAL("OPC_MXU_S32STDR");
25860 generate_exception_end(ctx
, EXCP_RI
);
25863 MIPS_INVAL("decode_opc_mxu");
25864 generate_exception_end(ctx
, EXCP_RI
);
25871 * Decode MXU pool06
25873 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25874 * +-----------+---------+---------+---+-------+-------+-----------+
25875 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL06|
25876 * +-----------+---------+---------+---+-------+-------+-----------+
25879 static void decode_opc_mxu__pool06(CPUMIPSState
*env
, DisasContext
*ctx
)
25881 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25884 case OPC_MXU_S32LDDV
:
25885 /* TODO: Implement emulation of S32LDDV instruction. */
25886 MIPS_INVAL("OPC_MXU_S32LDDV");
25887 generate_exception_end(ctx
, EXCP_RI
);
25889 case OPC_MXU_S32LDDVR
:
25890 /* TODO: Implement emulation of S32LDDVR instruction. */
25891 MIPS_INVAL("OPC_MXU_S32LDDVR");
25892 generate_exception_end(ctx
, EXCP_RI
);
25895 MIPS_INVAL("decode_opc_mxu");
25896 generate_exception_end(ctx
, EXCP_RI
);
25903 * Decode MXU pool07
25905 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25906 * +-----------+---------+---------+---+-------+-------+-----------+
25907 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL07|
25908 * +-----------+---------+---------+---+-------+-------+-----------+
25911 static void decode_opc_mxu__pool07(CPUMIPSState
*env
, DisasContext
*ctx
)
25913 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25916 case OPC_MXU_S32STDV
:
25917 /* TODO: Implement emulation of S32TDV instruction. */
25918 MIPS_INVAL("OPC_MXU_S32TDV");
25919 generate_exception_end(ctx
, EXCP_RI
);
25921 case OPC_MXU_S32STDVR
:
25922 /* TODO: Implement emulation of S32TDVR instruction. */
25923 MIPS_INVAL("OPC_MXU_S32TDVR");
25924 generate_exception_end(ctx
, EXCP_RI
);
25927 MIPS_INVAL("decode_opc_mxu");
25928 generate_exception_end(ctx
, EXCP_RI
);
25935 * Decode MXU pool08
25937 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25938 * +-----------+---------+-+-------------------+-------+-----------+
25939 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL08|
25940 * +-----------+---------+-+-------------------+-------+-----------+
25943 static void decode_opc_mxu__pool08(CPUMIPSState
*env
, DisasContext
*ctx
)
25945 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25948 case OPC_MXU_S32LDI
:
25949 /* TODO: Implement emulation of S32LDI instruction. */
25950 MIPS_INVAL("OPC_MXU_S32LDI");
25951 generate_exception_end(ctx
, EXCP_RI
);
25953 case OPC_MXU_S32LDIR
:
25954 /* TODO: Implement emulation of S32LDIR instruction. */
25955 MIPS_INVAL("OPC_MXU_S32LDIR");
25956 generate_exception_end(ctx
, EXCP_RI
);
25959 MIPS_INVAL("decode_opc_mxu");
25960 generate_exception_end(ctx
, EXCP_RI
);
25967 * Decode MXU pool09
25969 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25970 * +-----------+---------+-+-------------------+-------+-----------+
25971 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL09|
25972 * +-----------+---------+-+-------------------+-------+-----------+
25975 static void decode_opc_mxu__pool09(CPUMIPSState
*env
, DisasContext
*ctx
)
25977 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
25980 case OPC_MXU_S32SDI
:
25981 /* TODO: Implement emulation of S32SDI instruction. */
25982 MIPS_INVAL("OPC_MXU_S32SDI");
25983 generate_exception_end(ctx
, EXCP_RI
);
25985 case OPC_MXU_S32SDIR
:
25986 /* TODO: Implement emulation of S32SDIR instruction. */
25987 MIPS_INVAL("OPC_MXU_S32SDIR");
25988 generate_exception_end(ctx
, EXCP_RI
);
25991 MIPS_INVAL("decode_opc_mxu");
25992 generate_exception_end(ctx
, EXCP_RI
);
25999 * Decode MXU pool10
26001 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26002 * +-----------+---------+---------+---+-------+-------+-----------+
26003 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL10|
26004 * +-----------+---------+---------+---+-------+-------+-----------+
26007 static void decode_opc_mxu__pool10(CPUMIPSState
*env
, DisasContext
*ctx
)
26009 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
26012 case OPC_MXU_S32LDIV
:
26013 /* TODO: Implement emulation of S32LDIV instruction. */
26014 MIPS_INVAL("OPC_MXU_S32LDIV");
26015 generate_exception_end(ctx
, EXCP_RI
);
26017 case OPC_MXU_S32LDIVR
:
26018 /* TODO: Implement emulation of S32LDIVR instruction. */
26019 MIPS_INVAL("OPC_MXU_S32LDIVR");
26020 generate_exception_end(ctx
, EXCP_RI
);
26023 MIPS_INVAL("decode_opc_mxu");
26024 generate_exception_end(ctx
, EXCP_RI
);
26031 * Decode MXU pool11
26033 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26034 * +-----------+---------+---------+---+-------+-------+-----------+
26035 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL11|
26036 * +-----------+---------+---------+---+-------+-------+-----------+
26039 static void decode_opc_mxu__pool11(CPUMIPSState
*env
, DisasContext
*ctx
)
26041 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
26044 case OPC_MXU_S32SDIV
:
26045 /* TODO: Implement emulation of S32SDIV instruction. */
26046 MIPS_INVAL("OPC_MXU_S32SDIV");
26047 generate_exception_end(ctx
, EXCP_RI
);
26049 case OPC_MXU_S32SDIVR
:
26050 /* TODO: Implement emulation of S32SDIVR instruction. */
26051 MIPS_INVAL("OPC_MXU_S32SDIVR");
26052 generate_exception_end(ctx
, EXCP_RI
);
26055 MIPS_INVAL("decode_opc_mxu");
26056 generate_exception_end(ctx
, EXCP_RI
);
26063 * Decode MXU pool12
26065 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26066 * +-----------+---+---+-------+-------+-------+-------+-----------+
26067 * | SPECIAL2 |an2|x x| Xd | XRc | XRb | XRa |MXU__POOL12|
26068 * +-----------+---+---+-------+-------+-------+-------+-----------+
26071 static void decode_opc_mxu__pool12(CPUMIPSState
*env
, DisasContext
*ctx
)
26073 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26076 case OPC_MXU_D32ACC
:
26077 /* TODO: Implement emulation of D32ACC instruction. */
26078 MIPS_INVAL("OPC_MXU_D32ACC");
26079 generate_exception_end(ctx
, EXCP_RI
);
26081 case OPC_MXU_D32ACCM
:
26082 /* TODO: Implement emulation of D32ACCM instruction. */
26083 MIPS_INVAL("OPC_MXU_D32ACCM");
26084 generate_exception_end(ctx
, EXCP_RI
);
26086 case OPC_MXU_D32ASUM
:
26087 /* TODO: Implement emulation of D32ASUM instruction. */
26088 MIPS_INVAL("OPC_MXU_D32ASUM");
26089 generate_exception_end(ctx
, EXCP_RI
);
26092 MIPS_INVAL("decode_opc_mxu");
26093 generate_exception_end(ctx
, EXCP_RI
);
26100 * Decode MXU pool13
26102 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26103 * +-----------+---+---+-------+-------+-------+-------+-----------+
26104 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL13|
26105 * +-----------+---+---+-------+-------+-------+-------+-----------+
26108 static void decode_opc_mxu__pool13(CPUMIPSState
*env
, DisasContext
*ctx
)
26110 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26113 case OPC_MXU_Q16ACC
:
26114 /* TODO: Implement emulation of Q16ACC instruction. */
26115 MIPS_INVAL("OPC_MXU_Q16ACC");
26116 generate_exception_end(ctx
, EXCP_RI
);
26118 case OPC_MXU_Q16ACCM
:
26119 /* TODO: Implement emulation of Q16ACCM instruction. */
26120 MIPS_INVAL("OPC_MXU_Q16ACCM");
26121 generate_exception_end(ctx
, EXCP_RI
);
26123 case OPC_MXU_Q16ASUM
:
26124 /* TODO: Implement emulation of Q16ASUM instruction. */
26125 MIPS_INVAL("OPC_MXU_Q16ASUM");
26126 generate_exception_end(ctx
, EXCP_RI
);
26129 MIPS_INVAL("decode_opc_mxu");
26130 generate_exception_end(ctx
, EXCP_RI
);
26137 * Decode MXU pool14
26140 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26141 * +-----------+---+---+-------+-------+-------+-------+-----------+
26142 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL14|
26143 * +-----------+---+---+-------+-------+-------+-------+-----------+
26146 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26147 * +-----------+---+---+-------+-------+-------+-------+-----------+
26148 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL14|
26149 * +-----------+---+---+-------+-------+-------+-------+-----------+
26152 static void decode_opc_mxu__pool14(CPUMIPSState
*env
, DisasContext
*ctx
)
26154 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26157 case OPC_MXU_Q8ADDE
:
26158 /* TODO: Implement emulation of Q8ADDE instruction. */
26159 MIPS_INVAL("OPC_MXU_Q8ADDE");
26160 generate_exception_end(ctx
, EXCP_RI
);
26162 case OPC_MXU_D8SUM
:
26163 /* TODO: Implement emulation of D8SUM instruction. */
26164 MIPS_INVAL("OPC_MXU_D8SUM");
26165 generate_exception_end(ctx
, EXCP_RI
);
26167 case OPC_MXU_D8SUMC
:
26168 /* TODO: Implement emulation of D8SUMC instruction. */
26169 MIPS_INVAL("OPC_MXU_D8SUMC");
26170 generate_exception_end(ctx
, EXCP_RI
);
26173 MIPS_INVAL("decode_opc_mxu");
26174 generate_exception_end(ctx
, EXCP_RI
);
26181 * Decode MXU pool15
26183 * S32MUL, S32MULU, S32EXTRV:
26184 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26185 * +-----------+---------+---------+---+-------+-------+-----------+
26186 * | SPECIAL2 | rs | rt |x x| XRd | XRa |MXU__POOL15|
26187 * +-----------+---------+---------+---+-------+-------+-----------+
26190 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26191 * +-----------+---------+---------+---+-------+-------+-----------+
26192 * | SPECIAL2 | rb | sft5 |x x| XRd | XRa |MXU__POOL15|
26193 * +-----------+---------+---------+---+-------+-------+-----------+
26196 static void decode_opc_mxu__pool15(CPUMIPSState
*env
, DisasContext
*ctx
)
26198 uint32_t opcode
= extract32(ctx
->opcode
, 14, 2);
26201 case OPC_MXU_S32MUL
:
26202 /* TODO: Implement emulation of S32MUL instruction. */
26203 MIPS_INVAL("OPC_MXU_S32MUL");
26204 generate_exception_end(ctx
, EXCP_RI
);
26206 case OPC_MXU_S32MULU
:
26207 /* TODO: Implement emulation of S32MULU instruction. */
26208 MIPS_INVAL("OPC_MXU_S32MULU");
26209 generate_exception_end(ctx
, EXCP_RI
);
26211 case OPC_MXU_S32EXTR
:
26212 /* TODO: Implement emulation of S32EXTR instruction. */
26213 MIPS_INVAL("OPC_MXU_S32EXTR");
26214 generate_exception_end(ctx
, EXCP_RI
);
26216 case OPC_MXU_S32EXTRV
:
26217 /* TODO: Implement emulation of S32EXTRV instruction. */
26218 MIPS_INVAL("OPC_MXU_S32EXTRV");
26219 generate_exception_end(ctx
, EXCP_RI
);
26222 MIPS_INVAL("decode_opc_mxu");
26223 generate_exception_end(ctx
, EXCP_RI
);
26230 * Decode MXU pool16
26233 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26234 * +-----------+---------+-----+-------+-------+-------+-----------+
26235 * | SPECIAL2 | rb |x x x| XRc | XRb | XRa |MXU__POOL16|
26236 * +-----------+---------+-----+-------+-------+-------+-----------+
26239 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26240 * +-----------+---------+-----+-------+-------+-------+-----------+
26241 * | SPECIAL2 | rs |x x x| XRc | XRb | XRa |MXU__POOL16|
26242 * +-----------+---------+-----+-------+-------+-------+-----------+
26245 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26246 * +-----------+-----+---+-----+-------+-------+-------+-----------+
26247 * | SPECIAL2 | s3 |0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
26248 * +-----------+-----+---+-----+-------+-------+-------+-----------+
26251 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26252 * +-----------+-----+---+-----+-------+---------------+-----------+
26253 * | SPECIAL2 |optn3|0 0|x x x| XRc | s8 |MXU__POOL16|
26254 * +-----------+-----+---+-----+-------+---------------+-----------+
26256 * S32NOR, S32AND, S32OR, S32XOR:
26257 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26258 * +-----------+---------+-----+-------+-------+-------+-----------+
26259 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
26260 * +-----------+---------+-----+-------+-------+-------+-----------+
26263 static void decode_opc_mxu__pool16(CPUMIPSState
*env
, DisasContext
*ctx
)
26265 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26268 case OPC_MXU_D32SARW
:
26269 /* TODO: Implement emulation of D32SARW instruction. */
26270 MIPS_INVAL("OPC_MXU_D32SARW");
26271 generate_exception_end(ctx
, EXCP_RI
);
26273 case OPC_MXU_S32ALN
:
26274 /* TODO: Implement emulation of S32ALN instruction. */
26275 MIPS_INVAL("OPC_MXU_S32ALN");
26276 generate_exception_end(ctx
, EXCP_RI
);
26278 case OPC_MXU_S32ALNI
:
26279 gen_mxu_S32ALNI(ctx
);
26281 case OPC_MXU_S32LUI
:
26282 /* TODO: Implement emulation of S32LUI instruction. */
26283 MIPS_INVAL("OPC_MXU_S32LUI");
26284 generate_exception_end(ctx
, EXCP_RI
);
26286 case OPC_MXU_S32NOR
:
26287 gen_mxu_S32NOR(ctx
);
26289 case OPC_MXU_S32AND
:
26290 gen_mxu_S32AND(ctx
);
26292 case OPC_MXU_S32OR
:
26293 gen_mxu_S32OR(ctx
);
26295 case OPC_MXU_S32XOR
:
26296 gen_mxu_S32XOR(ctx
);
26299 MIPS_INVAL("decode_opc_mxu");
26300 generate_exception_end(ctx
, EXCP_RI
);
26307 * Decode MXU pool17
26309 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26310 * +-----------+---------+---------+---+---------+-----+-----------+
26311 * | SPECIAL2 | rs | rt |0 0| rd |x x x|MXU__POOL15|
26312 * +-----------+---------+---------+---+---------+-----+-----------+
26315 static void decode_opc_mxu__pool17(CPUMIPSState
*env
, DisasContext
*ctx
)
26317 uint32_t opcode
= extract32(ctx
->opcode
, 6, 2);
26321 /* TODO: Implement emulation of LXW instruction. */
26322 MIPS_INVAL("OPC_MXU_LXW");
26323 generate_exception_end(ctx
, EXCP_RI
);
26326 /* TODO: Implement emulation of LXH instruction. */
26327 MIPS_INVAL("OPC_MXU_LXH");
26328 generate_exception_end(ctx
, EXCP_RI
);
26331 /* TODO: Implement emulation of LXHU instruction. */
26332 MIPS_INVAL("OPC_MXU_LXHU");
26333 generate_exception_end(ctx
, EXCP_RI
);
26336 /* TODO: Implement emulation of LXB instruction. */
26337 MIPS_INVAL("OPC_MXU_LXB");
26338 generate_exception_end(ctx
, EXCP_RI
);
26341 /* TODO: Implement emulation of LXBU instruction. */
26342 MIPS_INVAL("OPC_MXU_LXBU");
26343 generate_exception_end(ctx
, EXCP_RI
);
26346 MIPS_INVAL("decode_opc_mxu");
26347 generate_exception_end(ctx
, EXCP_RI
);
26353 * Decode MXU pool18
26355 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26356 * +-----------+---------+-----+-------+-------+-------+-----------+
26357 * | SPECIAL2 | rb |x x x| XRd | XRa |0 0 0 0|MXU__POOL18|
26358 * +-----------+---------+-----+-------+-------+-------+-----------+
26361 static void decode_opc_mxu__pool18(CPUMIPSState
*env
, DisasContext
*ctx
)
26363 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26366 case OPC_MXU_D32SLLV
:
26367 /* TODO: Implement emulation of D32SLLV instruction. */
26368 MIPS_INVAL("OPC_MXU_D32SLLV");
26369 generate_exception_end(ctx
, EXCP_RI
);
26371 case OPC_MXU_D32SLRV
:
26372 /* TODO: Implement emulation of D32SLRV instruction. */
26373 MIPS_INVAL("OPC_MXU_D32SLRV");
26374 generate_exception_end(ctx
, EXCP_RI
);
26376 case OPC_MXU_D32SARV
:
26377 /* TODO: Implement emulation of D32SARV instruction. */
26378 MIPS_INVAL("OPC_MXU_D32SARV");
26379 generate_exception_end(ctx
, EXCP_RI
);
26381 case OPC_MXU_Q16SLLV
:
26382 /* TODO: Implement emulation of Q16SLLV instruction. */
26383 MIPS_INVAL("OPC_MXU_Q16SLLV");
26384 generate_exception_end(ctx
, EXCP_RI
);
26386 case OPC_MXU_Q16SLRV
:
26387 /* TODO: Implement emulation of Q16SLRV instruction. */
26388 MIPS_INVAL("OPC_MXU_Q16SLRV");
26389 generate_exception_end(ctx
, EXCP_RI
);
26391 case OPC_MXU_Q16SARV
:
26392 /* TODO: Implement emulation of Q16SARV instruction. */
26393 MIPS_INVAL("OPC_MXU_Q16SARV");
26394 generate_exception_end(ctx
, EXCP_RI
);
26397 MIPS_INVAL("decode_opc_mxu");
26398 generate_exception_end(ctx
, EXCP_RI
);
26405 * Decode MXU pool19
26407 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26408 * +-----------+---+---+-------+-------+-------+-------+-----------+
26409 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL19|
26410 * +-----------+---+---+-------+-------+-------+-------+-----------+
26413 static void decode_opc_mxu__pool19(CPUMIPSState
*env
, DisasContext
*ctx
)
26415 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26418 case OPC_MXU_Q8MUL
:
26419 case OPC_MXU_Q8MULSU
:
26420 gen_mxu_q8mul_q8mulsu(ctx
);
26423 MIPS_INVAL("decode_opc_mxu");
26424 generate_exception_end(ctx
, EXCP_RI
);
26431 * Decode MXU pool20
26433 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26434 * +-----------+---------+-----+-------+-------+-------+-----------+
26435 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL20|
26436 * +-----------+---------+-----+-------+-------+-------+-----------+
26439 static void decode_opc_mxu__pool20(CPUMIPSState
*env
, DisasContext
*ctx
)
26441 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26444 case OPC_MXU_Q8MOVZ
:
26445 /* TODO: Implement emulation of Q8MOVZ instruction. */
26446 MIPS_INVAL("OPC_MXU_Q8MOVZ");
26447 generate_exception_end(ctx
, EXCP_RI
);
26449 case OPC_MXU_Q8MOVN
:
26450 /* TODO: Implement emulation of Q8MOVN instruction. */
26451 MIPS_INVAL("OPC_MXU_Q8MOVN");
26452 generate_exception_end(ctx
, EXCP_RI
);
26454 case OPC_MXU_D16MOVZ
:
26455 /* TODO: Implement emulation of D16MOVZ instruction. */
26456 MIPS_INVAL("OPC_MXU_D16MOVZ");
26457 generate_exception_end(ctx
, EXCP_RI
);
26459 case OPC_MXU_D16MOVN
:
26460 /* TODO: Implement emulation of D16MOVN instruction. */
26461 MIPS_INVAL("OPC_MXU_D16MOVN");
26462 generate_exception_end(ctx
, EXCP_RI
);
26464 case OPC_MXU_S32MOVZ
:
26465 /* TODO: Implement emulation of S32MOVZ instruction. */
26466 MIPS_INVAL("OPC_MXU_S32MOVZ");
26467 generate_exception_end(ctx
, EXCP_RI
);
26469 case OPC_MXU_S32MOVN
:
26470 /* TODO: Implement emulation of S32MOVN instruction. */
26471 MIPS_INVAL("OPC_MXU_S32MOVN");
26472 generate_exception_end(ctx
, EXCP_RI
);
26475 MIPS_INVAL("decode_opc_mxu");
26476 generate_exception_end(ctx
, EXCP_RI
);
26483 * Decode MXU pool21
26485 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26486 * +-----------+---+---+-------+-------+-------+-------+-----------+
26487 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL21|
26488 * +-----------+---+---+-------+-------+-------+-------+-----------+
26491 static void decode_opc_mxu__pool21(CPUMIPSState
*env
, DisasContext
*ctx
)
26493 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26496 case OPC_MXU_Q8MAC
:
26497 /* TODO: Implement emulation of Q8MAC instruction. */
26498 MIPS_INVAL("OPC_MXU_Q8MAC");
26499 generate_exception_end(ctx
, EXCP_RI
);
26501 case OPC_MXU_Q8MACSU
:
26502 /* TODO: Implement emulation of Q8MACSU instruction. */
26503 MIPS_INVAL("OPC_MXU_Q8MACSU");
26504 generate_exception_end(ctx
, EXCP_RI
);
26507 MIPS_INVAL("decode_opc_mxu");
26508 generate_exception_end(ctx
, EXCP_RI
);
26515 * Main MXU decoding function
26517 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26518 * +-----------+---------------------------------------+-----------+
26519 * | SPECIAL2 | |x x x x x x|
26520 * +-----------+---------------------------------------+-----------+
26523 static void decode_opc_mxu(CPUMIPSState
*env
, DisasContext
*ctx
)
26526 * TODO: Investigate necessity of including handling of
26527 * CLZ, CLO, SDBB in this function, as they belong to
26528 * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
26530 uint32_t opcode
= extract32(ctx
->opcode
, 0, 6);
26532 if (opcode
== OPC__MXU_MUL
) {
26533 uint32_t rs
, rt
, rd
, op1
;
26535 rs
= extract32(ctx
->opcode
, 21, 5);
26536 rt
= extract32(ctx
->opcode
, 16, 5);
26537 rd
= extract32(ctx
->opcode
, 11, 5);
26538 op1
= MASK_SPECIAL2(ctx
->opcode
);
26540 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26545 if (opcode
== OPC_MXU_S32M2I
) {
26546 gen_mxu_s32m2i(ctx
);
26550 if (opcode
== OPC_MXU_S32I2M
) {
26551 gen_mxu_s32i2m(ctx
);
26556 TCGv t_mxu_cr
= tcg_temp_new();
26557 TCGLabel
*l_exit
= gen_new_label();
26559 gen_load_mxu_cr(t_mxu_cr
);
26560 tcg_gen_andi_tl(t_mxu_cr
, t_mxu_cr
, MXU_CR_MXU_EN
);
26561 tcg_gen_brcondi_tl(TCG_COND_NE
, t_mxu_cr
, MXU_CR_MXU_EN
, l_exit
);
26564 case OPC_MXU_S32MADD
:
26565 /* TODO: Implement emulation of S32MADD instruction. */
26566 MIPS_INVAL("OPC_MXU_S32MADD");
26567 generate_exception_end(ctx
, EXCP_RI
);
26569 case OPC_MXU_S32MADDU
:
26570 /* TODO: Implement emulation of S32MADDU instruction. */
26571 MIPS_INVAL("OPC_MXU_S32MADDU");
26572 generate_exception_end(ctx
, EXCP_RI
);
26574 case OPC_MXU__POOL00
:
26575 decode_opc_mxu__pool00(env
, ctx
);
26577 case OPC_MXU_S32MSUB
:
26578 /* TODO: Implement emulation of S32MSUB instruction. */
26579 MIPS_INVAL("OPC_MXU_S32MSUB");
26580 generate_exception_end(ctx
, EXCP_RI
);
26582 case OPC_MXU_S32MSUBU
:
26583 /* TODO: Implement emulation of S32MSUBU instruction. */
26584 MIPS_INVAL("OPC_MXU_S32MSUBU");
26585 generate_exception_end(ctx
, EXCP_RI
);
26587 case OPC_MXU__POOL01
:
26588 decode_opc_mxu__pool01(env
, ctx
);
26590 case OPC_MXU__POOL02
:
26591 decode_opc_mxu__pool02(env
, ctx
);
26593 case OPC_MXU_D16MUL
:
26594 gen_mxu_d16mul(ctx
);
26596 case OPC_MXU__POOL03
:
26597 decode_opc_mxu__pool03(env
, ctx
);
26599 case OPC_MXU_D16MAC
:
26600 gen_mxu_d16mac(ctx
);
26602 case OPC_MXU_D16MACF
:
26603 /* TODO: Implement emulation of D16MACF instruction. */
26604 MIPS_INVAL("OPC_MXU_D16MACF");
26605 generate_exception_end(ctx
, EXCP_RI
);
26607 case OPC_MXU_D16MADL
:
26608 /* TODO: Implement emulation of D16MADL instruction. */
26609 MIPS_INVAL("OPC_MXU_D16MADL");
26610 generate_exception_end(ctx
, EXCP_RI
);
26612 case OPC_MXU_S16MAD
:
26613 /* TODO: Implement emulation of S16MAD instruction. */
26614 MIPS_INVAL("OPC_MXU_S16MAD");
26615 generate_exception_end(ctx
, EXCP_RI
);
26617 case OPC_MXU_Q16ADD
:
26618 /* TODO: Implement emulation of Q16ADD instruction. */
26619 MIPS_INVAL("OPC_MXU_Q16ADD");
26620 generate_exception_end(ctx
, EXCP_RI
);
26622 case OPC_MXU_D16MACE
:
26623 /* TODO: Implement emulation of D16MACE instruction. */
26624 MIPS_INVAL("OPC_MXU_D16MACE");
26625 generate_exception_end(ctx
, EXCP_RI
);
26627 case OPC_MXU__POOL04
:
26628 decode_opc_mxu__pool04(env
, ctx
);
26630 case OPC_MXU__POOL05
:
26631 decode_opc_mxu__pool05(env
, ctx
);
26633 case OPC_MXU__POOL06
:
26634 decode_opc_mxu__pool06(env
, ctx
);
26636 case OPC_MXU__POOL07
:
26637 decode_opc_mxu__pool07(env
, ctx
);
26639 case OPC_MXU__POOL08
:
26640 decode_opc_mxu__pool08(env
, ctx
);
26642 case OPC_MXU__POOL09
:
26643 decode_opc_mxu__pool09(env
, ctx
);
26645 case OPC_MXU__POOL10
:
26646 decode_opc_mxu__pool10(env
, ctx
);
26648 case OPC_MXU__POOL11
:
26649 decode_opc_mxu__pool11(env
, ctx
);
26651 case OPC_MXU_D32ADD
:
26652 /* TODO: Implement emulation of D32ADD instruction. */
26653 MIPS_INVAL("OPC_MXU_D32ADD");
26654 generate_exception_end(ctx
, EXCP_RI
);
26656 case OPC_MXU__POOL12
:
26657 decode_opc_mxu__pool12(env
, ctx
);
26659 case OPC_MXU__POOL13
:
26660 decode_opc_mxu__pool13(env
, ctx
);
26662 case OPC_MXU__POOL14
:
26663 decode_opc_mxu__pool14(env
, ctx
);
26665 case OPC_MXU_Q8ACCE
:
26666 /* TODO: Implement emulation of Q8ACCE instruction. */
26667 MIPS_INVAL("OPC_MXU_Q8ACCE");
26668 generate_exception_end(ctx
, EXCP_RI
);
26670 case OPC_MXU_S8LDD
:
26671 gen_mxu_s8ldd(ctx
);
26673 case OPC_MXU_S8STD
:
26674 /* TODO: Implement emulation of S8STD instruction. */
26675 MIPS_INVAL("OPC_MXU_S8STD");
26676 generate_exception_end(ctx
, EXCP_RI
);
26678 case OPC_MXU_S8LDI
:
26679 /* TODO: Implement emulation of S8LDI instruction. */
26680 MIPS_INVAL("OPC_MXU_S8LDI");
26681 generate_exception_end(ctx
, EXCP_RI
);
26683 case OPC_MXU_S8SDI
:
26684 /* TODO: Implement emulation of S8SDI instruction. */
26685 MIPS_INVAL("OPC_MXU_S8SDI");
26686 generate_exception_end(ctx
, EXCP_RI
);
26688 case OPC_MXU__POOL15
:
26689 decode_opc_mxu__pool15(env
, ctx
);
26691 case OPC_MXU__POOL16
:
26692 decode_opc_mxu__pool16(env
, ctx
);
26694 case OPC_MXU__POOL17
:
26695 decode_opc_mxu__pool17(env
, ctx
);
26697 case OPC_MXU_S16LDD
:
26698 /* TODO: Implement emulation of S16LDD instruction. */
26699 MIPS_INVAL("OPC_MXU_S16LDD");
26700 generate_exception_end(ctx
, EXCP_RI
);
26702 case OPC_MXU_S16STD
:
26703 /* TODO: Implement emulation of S16STD instruction. */
26704 MIPS_INVAL("OPC_MXU_S16STD");
26705 generate_exception_end(ctx
, EXCP_RI
);
26707 case OPC_MXU_S16LDI
:
26708 /* TODO: Implement emulation of S16LDI instruction. */
26709 MIPS_INVAL("OPC_MXU_S16LDI");
26710 generate_exception_end(ctx
, EXCP_RI
);
26712 case OPC_MXU_S16SDI
:
26713 /* TODO: Implement emulation of S16SDI instruction. */
26714 MIPS_INVAL("OPC_MXU_S16SDI");
26715 generate_exception_end(ctx
, EXCP_RI
);
26717 case OPC_MXU_D32SLL
:
26718 /* TODO: Implement emulation of D32SLL instruction. */
26719 MIPS_INVAL("OPC_MXU_D32SLL");
26720 generate_exception_end(ctx
, EXCP_RI
);
26722 case OPC_MXU_D32SLR
:
26723 /* TODO: Implement emulation of D32SLR instruction. */
26724 MIPS_INVAL("OPC_MXU_D32SLR");
26725 generate_exception_end(ctx
, EXCP_RI
);
26727 case OPC_MXU_D32SARL
:
26728 /* TODO: Implement emulation of D32SARL instruction. */
26729 MIPS_INVAL("OPC_MXU_D32SARL");
26730 generate_exception_end(ctx
, EXCP_RI
);
26732 case OPC_MXU_D32SAR
:
26733 /* TODO: Implement emulation of D32SAR instruction. */
26734 MIPS_INVAL("OPC_MXU_D32SAR");
26735 generate_exception_end(ctx
, EXCP_RI
);
26737 case OPC_MXU_Q16SLL
:
26738 /* TODO: Implement emulation of Q16SLL instruction. */
26739 MIPS_INVAL("OPC_MXU_Q16SLL");
26740 generate_exception_end(ctx
, EXCP_RI
);
26742 case OPC_MXU_Q16SLR
:
26743 /* TODO: Implement emulation of Q16SLR instruction. */
26744 MIPS_INVAL("OPC_MXU_Q16SLR");
26745 generate_exception_end(ctx
, EXCP_RI
);
26747 case OPC_MXU__POOL18
:
26748 decode_opc_mxu__pool18(env
, ctx
);
26750 case OPC_MXU_Q16SAR
:
26751 /* TODO: Implement emulation of Q16SAR instruction. */
26752 MIPS_INVAL("OPC_MXU_Q16SAR");
26753 generate_exception_end(ctx
, EXCP_RI
);
26755 case OPC_MXU__POOL19
:
26756 decode_opc_mxu__pool19(env
, ctx
);
26758 case OPC_MXU__POOL20
:
26759 decode_opc_mxu__pool20(env
, ctx
);
26761 case OPC_MXU__POOL21
:
26762 decode_opc_mxu__pool21(env
, ctx
);
26764 case OPC_MXU_Q16SCOP
:
26765 /* TODO: Implement emulation of Q16SCOP instruction. */
26766 MIPS_INVAL("OPC_MXU_Q16SCOP");
26767 generate_exception_end(ctx
, EXCP_RI
);
26769 case OPC_MXU_Q8MADL
:
26770 /* TODO: Implement emulation of Q8MADL instruction. */
26771 MIPS_INVAL("OPC_MXU_Q8MADL");
26772 generate_exception_end(ctx
, EXCP_RI
);
26774 case OPC_MXU_S32SFL
:
26775 /* TODO: Implement emulation of S32SFL instruction. */
26776 MIPS_INVAL("OPC_MXU_S32SFL");
26777 generate_exception_end(ctx
, EXCP_RI
);
26779 case OPC_MXU_Q8SAD
:
26780 /* TODO: Implement emulation of Q8SAD instruction. */
26781 MIPS_INVAL("OPC_MXU_Q8SAD");
26782 generate_exception_end(ctx
, EXCP_RI
);
26785 MIPS_INVAL("decode_opc_mxu");
26786 generate_exception_end(ctx
, EXCP_RI
);
26789 gen_set_label(l_exit
);
26790 tcg_temp_free(t_mxu_cr
);
26794 #endif /* !defined(TARGET_MIPS64) */
26797 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26802 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26804 rs
= (ctx
->opcode
>> 21) & 0x1f;
26805 rt
= (ctx
->opcode
>> 16) & 0x1f;
26806 rd
= (ctx
->opcode
>> 11) & 0x1f;
26808 op1
= MASK_SPECIAL2(ctx
->opcode
);
26810 case OPC_MADD
: /* Multiply and add/sub */
26814 check_insn(ctx
, ISA_MIPS32
);
26815 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
26818 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26821 case OPC_DIVU_G_2F
:
26822 case OPC_MULT_G_2F
:
26823 case OPC_MULTU_G_2F
:
26825 case OPC_MODU_G_2F
:
26826 check_insn(ctx
, INSN_LOONGSON2F
);
26827 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26831 check_insn(ctx
, ISA_MIPS32
);
26832 gen_cl(ctx
, op1
, rd
, rs
);
26835 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
26836 gen_helper_do_semihosting(cpu_env
);
26838 /* XXX: not clear which exception should be raised
26839 * when in debug mode...
26841 check_insn(ctx
, ISA_MIPS32
);
26842 generate_exception_end(ctx
, EXCP_DBp
);
26845 #if defined(TARGET_MIPS64)
26848 check_insn(ctx
, ISA_MIPS64
);
26849 check_mips_64(ctx
);
26850 gen_cl(ctx
, op1
, rd
, rs
);
26852 case OPC_DMULT_G_2F
:
26853 case OPC_DMULTU_G_2F
:
26854 case OPC_DDIV_G_2F
:
26855 case OPC_DDIVU_G_2F
:
26856 case OPC_DMOD_G_2F
:
26857 case OPC_DMODU_G_2F
:
26858 check_insn(ctx
, INSN_LOONGSON2F
);
26859 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26862 default: /* Invalid */
26863 MIPS_INVAL("special2_legacy");
26864 generate_exception_end(ctx
, EXCP_RI
);
26869 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
26871 int rs
, rt
, rd
, sa
;
26875 rs
= (ctx
->opcode
>> 21) & 0x1f;
26876 rt
= (ctx
->opcode
>> 16) & 0x1f;
26877 rd
= (ctx
->opcode
>> 11) & 0x1f;
26878 sa
= (ctx
->opcode
>> 6) & 0x1f;
26879 imm
= (int16_t)ctx
->opcode
>> 7;
26881 op1
= MASK_SPECIAL3(ctx
->opcode
);
26885 /* hint codes 24-31 are reserved and signal RI */
26886 generate_exception_end(ctx
, EXCP_RI
);
26888 /* Treat as NOP. */
26891 check_cp0_enabled(ctx
);
26892 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26893 gen_cache_operation(ctx
, rt
, rs
, imm
);
26897 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
26900 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26905 /* Treat as NOP. */
26908 op2
= MASK_BSHFL(ctx
->opcode
);
26914 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
26917 gen_bitswap(ctx
, op2
, rd
, rt
);
26922 #if defined(TARGET_MIPS64)
26924 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEQ
, false);
26927 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26930 check_mips_64(ctx
);
26933 /* Treat as NOP. */
26936 op2
= MASK_DBSHFL(ctx
->opcode
);
26946 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
26949 gen_bitswap(ctx
, op2
, rd
, rt
);
26956 default: /* Invalid */
26957 MIPS_INVAL("special3_r6");
26958 generate_exception_end(ctx
, EXCP_RI
);
26963 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26968 rs
= (ctx
->opcode
>> 21) & 0x1f;
26969 rt
= (ctx
->opcode
>> 16) & 0x1f;
26970 rd
= (ctx
->opcode
>> 11) & 0x1f;
26972 op1
= MASK_SPECIAL3(ctx
->opcode
);
26975 case OPC_DIVU_G_2E
:
26977 case OPC_MODU_G_2E
:
26978 case OPC_MULT_G_2E
:
26979 case OPC_MULTU_G_2E
:
26980 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
26981 * the same mask and op1. */
26982 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
26983 op2
= MASK_ADDUH_QB(ctx
->opcode
);
26986 case OPC_ADDUH_R_QB
:
26988 case OPC_ADDQH_R_PH
:
26990 case OPC_ADDQH_R_W
:
26992 case OPC_SUBUH_R_QB
:
26994 case OPC_SUBQH_R_PH
:
26996 case OPC_SUBQH_R_W
:
26997 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27002 case OPC_MULQ_RS_W
:
27003 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27006 MIPS_INVAL("MASK ADDUH.QB");
27007 generate_exception_end(ctx
, EXCP_RI
);
27010 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
27011 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27013 generate_exception_end(ctx
, EXCP_RI
);
27017 op2
= MASK_LX(ctx
->opcode
);
27019 #if defined(TARGET_MIPS64)
27025 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
27027 default: /* Invalid */
27028 MIPS_INVAL("MASK LX");
27029 generate_exception_end(ctx
, EXCP_RI
);
27033 case OPC_ABSQ_S_PH_DSP
:
27034 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
27036 case OPC_ABSQ_S_QB
:
27037 case OPC_ABSQ_S_PH
:
27039 case OPC_PRECEQ_W_PHL
:
27040 case OPC_PRECEQ_W_PHR
:
27041 case OPC_PRECEQU_PH_QBL
:
27042 case OPC_PRECEQU_PH_QBR
:
27043 case OPC_PRECEQU_PH_QBLA
:
27044 case OPC_PRECEQU_PH_QBRA
:
27045 case OPC_PRECEU_PH_QBL
:
27046 case OPC_PRECEU_PH_QBR
:
27047 case OPC_PRECEU_PH_QBLA
:
27048 case OPC_PRECEU_PH_QBRA
:
27049 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27056 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27059 MIPS_INVAL("MASK ABSQ_S.PH");
27060 generate_exception_end(ctx
, EXCP_RI
);
27064 case OPC_ADDU_QB_DSP
:
27065 op2
= MASK_ADDU_QB(ctx
->opcode
);
27068 case OPC_ADDQ_S_PH
:
27071 case OPC_ADDU_S_QB
:
27073 case OPC_ADDU_S_PH
:
27075 case OPC_SUBQ_S_PH
:
27078 case OPC_SUBU_S_QB
:
27080 case OPC_SUBU_S_PH
:
27084 case OPC_RADDU_W_QB
:
27085 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27087 case OPC_MULEU_S_PH_QBL
:
27088 case OPC_MULEU_S_PH_QBR
:
27089 case OPC_MULQ_RS_PH
:
27090 case OPC_MULEQ_S_W_PHL
:
27091 case OPC_MULEQ_S_W_PHR
:
27092 case OPC_MULQ_S_PH
:
27093 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27095 default: /* Invalid */
27096 MIPS_INVAL("MASK ADDU.QB");
27097 generate_exception_end(ctx
, EXCP_RI
);
27102 case OPC_CMPU_EQ_QB_DSP
:
27103 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
27105 case OPC_PRECR_SRA_PH_W
:
27106 case OPC_PRECR_SRA_R_PH_W
:
27107 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27109 case OPC_PRECR_QB_PH
:
27110 case OPC_PRECRQ_QB_PH
:
27111 case OPC_PRECRQ_PH_W
:
27112 case OPC_PRECRQ_RS_PH_W
:
27113 case OPC_PRECRQU_S_QB_PH
:
27114 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27116 case OPC_CMPU_EQ_QB
:
27117 case OPC_CMPU_LT_QB
:
27118 case OPC_CMPU_LE_QB
:
27119 case OPC_CMP_EQ_PH
:
27120 case OPC_CMP_LT_PH
:
27121 case OPC_CMP_LE_PH
:
27122 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27124 case OPC_CMPGU_EQ_QB
:
27125 case OPC_CMPGU_LT_QB
:
27126 case OPC_CMPGU_LE_QB
:
27127 case OPC_CMPGDU_EQ_QB
:
27128 case OPC_CMPGDU_LT_QB
:
27129 case OPC_CMPGDU_LE_QB
:
27132 case OPC_PACKRL_PH
:
27133 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27135 default: /* Invalid */
27136 MIPS_INVAL("MASK CMPU.EQ.QB");
27137 generate_exception_end(ctx
, EXCP_RI
);
27141 case OPC_SHLL_QB_DSP
:
27142 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27144 case OPC_DPA_W_PH_DSP
:
27145 op2
= MASK_DPA_W_PH(ctx
->opcode
);
27147 case OPC_DPAU_H_QBL
:
27148 case OPC_DPAU_H_QBR
:
27149 case OPC_DPSU_H_QBL
:
27150 case OPC_DPSU_H_QBR
:
27152 case OPC_DPAX_W_PH
:
27153 case OPC_DPAQ_S_W_PH
:
27154 case OPC_DPAQX_S_W_PH
:
27155 case OPC_DPAQX_SA_W_PH
:
27157 case OPC_DPSX_W_PH
:
27158 case OPC_DPSQ_S_W_PH
:
27159 case OPC_DPSQX_S_W_PH
:
27160 case OPC_DPSQX_SA_W_PH
:
27161 case OPC_MULSAQ_S_W_PH
:
27162 case OPC_DPAQ_SA_L_W
:
27163 case OPC_DPSQ_SA_L_W
:
27164 case OPC_MAQ_S_W_PHL
:
27165 case OPC_MAQ_S_W_PHR
:
27166 case OPC_MAQ_SA_W_PHL
:
27167 case OPC_MAQ_SA_W_PHR
:
27168 case OPC_MULSA_W_PH
:
27169 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27171 default: /* Invalid */
27172 MIPS_INVAL("MASK DPAW.PH");
27173 generate_exception_end(ctx
, EXCP_RI
);
27178 op2
= MASK_INSV(ctx
->opcode
);
27189 t0
= tcg_temp_new();
27190 t1
= tcg_temp_new();
27192 gen_load_gpr(t0
, rt
);
27193 gen_load_gpr(t1
, rs
);
27195 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27201 default: /* Invalid */
27202 MIPS_INVAL("MASK INSV");
27203 generate_exception_end(ctx
, EXCP_RI
);
27207 case OPC_APPEND_DSP
:
27208 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27210 case OPC_EXTR_W_DSP
:
27211 op2
= MASK_EXTR_W(ctx
->opcode
);
27215 case OPC_EXTR_RS_W
:
27217 case OPC_EXTRV_S_H
:
27219 case OPC_EXTRV_R_W
:
27220 case OPC_EXTRV_RS_W
:
27225 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27228 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27234 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27236 default: /* Invalid */
27237 MIPS_INVAL("MASK EXTR.W");
27238 generate_exception_end(ctx
, EXCP_RI
);
27242 #if defined(TARGET_MIPS64)
27243 case OPC_DDIV_G_2E
:
27244 case OPC_DDIVU_G_2E
:
27245 case OPC_DMULT_G_2E
:
27246 case OPC_DMULTU_G_2E
:
27247 case OPC_DMOD_G_2E
:
27248 case OPC_DMODU_G_2E
:
27249 check_insn(ctx
, INSN_LOONGSON2E
);
27250 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27252 case OPC_ABSQ_S_QH_DSP
:
27253 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
27255 case OPC_PRECEQ_L_PWL
:
27256 case OPC_PRECEQ_L_PWR
:
27257 case OPC_PRECEQ_PW_QHL
:
27258 case OPC_PRECEQ_PW_QHR
:
27259 case OPC_PRECEQ_PW_QHLA
:
27260 case OPC_PRECEQ_PW_QHRA
:
27261 case OPC_PRECEQU_QH_OBL
:
27262 case OPC_PRECEQU_QH_OBR
:
27263 case OPC_PRECEQU_QH_OBLA
:
27264 case OPC_PRECEQU_QH_OBRA
:
27265 case OPC_PRECEU_QH_OBL
:
27266 case OPC_PRECEU_QH_OBR
:
27267 case OPC_PRECEU_QH_OBLA
:
27268 case OPC_PRECEU_QH_OBRA
:
27269 case OPC_ABSQ_S_OB
:
27270 case OPC_ABSQ_S_PW
:
27271 case OPC_ABSQ_S_QH
:
27272 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27280 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27282 default: /* Invalid */
27283 MIPS_INVAL("MASK ABSQ_S.QH");
27284 generate_exception_end(ctx
, EXCP_RI
);
27288 case OPC_ADDU_OB_DSP
:
27289 op2
= MASK_ADDU_OB(ctx
->opcode
);
27291 case OPC_RADDU_L_OB
:
27293 case OPC_SUBQ_S_PW
:
27295 case OPC_SUBQ_S_QH
:
27297 case OPC_SUBU_S_OB
:
27299 case OPC_SUBU_S_QH
:
27301 case OPC_SUBUH_R_OB
:
27303 case OPC_ADDQ_S_PW
:
27305 case OPC_ADDQ_S_QH
:
27307 case OPC_ADDU_S_OB
:
27309 case OPC_ADDU_S_QH
:
27311 case OPC_ADDUH_R_OB
:
27312 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27314 case OPC_MULEQ_S_PW_QHL
:
27315 case OPC_MULEQ_S_PW_QHR
:
27316 case OPC_MULEU_S_QH_OBL
:
27317 case OPC_MULEU_S_QH_OBR
:
27318 case OPC_MULQ_RS_QH
:
27319 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27321 default: /* Invalid */
27322 MIPS_INVAL("MASK ADDU.OB");
27323 generate_exception_end(ctx
, EXCP_RI
);
27327 case OPC_CMPU_EQ_OB_DSP
:
27328 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
27330 case OPC_PRECR_SRA_QH_PW
:
27331 case OPC_PRECR_SRA_R_QH_PW
:
27332 /* Return value is rt. */
27333 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27335 case OPC_PRECR_OB_QH
:
27336 case OPC_PRECRQ_OB_QH
:
27337 case OPC_PRECRQ_PW_L
:
27338 case OPC_PRECRQ_QH_PW
:
27339 case OPC_PRECRQ_RS_QH_PW
:
27340 case OPC_PRECRQU_S_OB_QH
:
27341 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27343 case OPC_CMPU_EQ_OB
:
27344 case OPC_CMPU_LT_OB
:
27345 case OPC_CMPU_LE_OB
:
27346 case OPC_CMP_EQ_QH
:
27347 case OPC_CMP_LT_QH
:
27348 case OPC_CMP_LE_QH
:
27349 case OPC_CMP_EQ_PW
:
27350 case OPC_CMP_LT_PW
:
27351 case OPC_CMP_LE_PW
:
27352 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27354 case OPC_CMPGDU_EQ_OB
:
27355 case OPC_CMPGDU_LT_OB
:
27356 case OPC_CMPGDU_LE_OB
:
27357 case OPC_CMPGU_EQ_OB
:
27358 case OPC_CMPGU_LT_OB
:
27359 case OPC_CMPGU_LE_OB
:
27360 case OPC_PACKRL_PW
:
27364 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27366 default: /* Invalid */
27367 MIPS_INVAL("MASK CMPU_EQ.OB");
27368 generate_exception_end(ctx
, EXCP_RI
);
27372 case OPC_DAPPEND_DSP
:
27373 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27375 case OPC_DEXTR_W_DSP
:
27376 op2
= MASK_DEXTR_W(ctx
->opcode
);
27383 case OPC_DEXTR_R_L
:
27384 case OPC_DEXTR_RS_L
:
27386 case OPC_DEXTR_R_W
:
27387 case OPC_DEXTR_RS_W
:
27388 case OPC_DEXTR_S_H
:
27390 case OPC_DEXTRV_R_L
:
27391 case OPC_DEXTRV_RS_L
:
27392 case OPC_DEXTRV_S_H
:
27394 case OPC_DEXTRV_R_W
:
27395 case OPC_DEXTRV_RS_W
:
27396 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27401 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27403 default: /* Invalid */
27404 MIPS_INVAL("MASK EXTR.W");
27405 generate_exception_end(ctx
, EXCP_RI
);
27409 case OPC_DPAQ_W_QH_DSP
:
27410 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
27412 case OPC_DPAU_H_OBL
:
27413 case OPC_DPAU_H_OBR
:
27414 case OPC_DPSU_H_OBL
:
27415 case OPC_DPSU_H_OBR
:
27417 case OPC_DPAQ_S_W_QH
:
27419 case OPC_DPSQ_S_W_QH
:
27420 case OPC_MULSAQ_S_W_QH
:
27421 case OPC_DPAQ_SA_L_PW
:
27422 case OPC_DPSQ_SA_L_PW
:
27423 case OPC_MULSAQ_S_L_PW
:
27424 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27426 case OPC_MAQ_S_W_QHLL
:
27427 case OPC_MAQ_S_W_QHLR
:
27428 case OPC_MAQ_S_W_QHRL
:
27429 case OPC_MAQ_S_W_QHRR
:
27430 case OPC_MAQ_SA_W_QHLL
:
27431 case OPC_MAQ_SA_W_QHLR
:
27432 case OPC_MAQ_SA_W_QHRL
:
27433 case OPC_MAQ_SA_W_QHRR
:
27434 case OPC_MAQ_S_L_PWL
:
27435 case OPC_MAQ_S_L_PWR
:
27440 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27442 default: /* Invalid */
27443 MIPS_INVAL("MASK DPAQ.W.QH");
27444 generate_exception_end(ctx
, EXCP_RI
);
27448 case OPC_DINSV_DSP
:
27449 op2
= MASK_INSV(ctx
->opcode
);
27460 t0
= tcg_temp_new();
27461 t1
= tcg_temp_new();
27463 gen_load_gpr(t0
, rt
);
27464 gen_load_gpr(t1
, rs
);
27466 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27472 default: /* Invalid */
27473 MIPS_INVAL("MASK DINSV");
27474 generate_exception_end(ctx
, EXCP_RI
);
27478 case OPC_SHLL_OB_DSP
:
27479 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27482 default: /* Invalid */
27483 MIPS_INVAL("special3_legacy");
27484 generate_exception_end(ctx
, EXCP_RI
);
27490 #if defined(TARGET_MIPS64)
27492 static void decode_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
27494 uint32_t opc
= MASK_MMI0(ctx
->opcode
);
27497 case MMI_OPC_0_PADDW
: /* TODO: MMI_OPC_0_PADDW */
27498 case MMI_OPC_0_PSUBW
: /* TODO: MMI_OPC_0_PSUBW */
27499 case MMI_OPC_0_PCGTW
: /* TODO: MMI_OPC_0_PCGTW */
27500 case MMI_OPC_0_PMAXW
: /* TODO: MMI_OPC_0_PMAXW */
27501 case MMI_OPC_0_PADDH
: /* TODO: MMI_OPC_0_PADDH */
27502 case MMI_OPC_0_PSUBH
: /* TODO: MMI_OPC_0_PSUBH */
27503 case MMI_OPC_0_PCGTH
: /* TODO: MMI_OPC_0_PCGTH */
27504 case MMI_OPC_0_PMAXH
: /* TODO: MMI_OPC_0_PMAXH */
27505 case MMI_OPC_0_PADDB
: /* TODO: MMI_OPC_0_PADDB */
27506 case MMI_OPC_0_PSUBB
: /* TODO: MMI_OPC_0_PSUBB */
27507 case MMI_OPC_0_PCGTB
: /* TODO: MMI_OPC_0_PCGTB */
27508 case MMI_OPC_0_PADDSW
: /* TODO: MMI_OPC_0_PADDSW */
27509 case MMI_OPC_0_PSUBSW
: /* TODO: MMI_OPC_0_PSUBSW */
27510 case MMI_OPC_0_PEXTLW
: /* TODO: MMI_OPC_0_PEXTLW */
27511 case MMI_OPC_0_PPACW
: /* TODO: MMI_OPC_0_PPACW */
27512 case MMI_OPC_0_PADDSH
: /* TODO: MMI_OPC_0_PADDSH */
27513 case MMI_OPC_0_PSUBSH
: /* TODO: MMI_OPC_0_PSUBSH */
27514 case MMI_OPC_0_PEXTLH
: /* TODO: MMI_OPC_0_PEXTLH */
27515 case MMI_OPC_0_PPACH
: /* TODO: MMI_OPC_0_PPACH */
27516 case MMI_OPC_0_PADDSB
: /* TODO: MMI_OPC_0_PADDSB */
27517 case MMI_OPC_0_PSUBSB
: /* TODO: MMI_OPC_0_PSUBSB */
27518 case MMI_OPC_0_PEXTLB
: /* TODO: MMI_OPC_0_PEXTLB */
27519 case MMI_OPC_0_PPACB
: /* TODO: MMI_OPC_0_PPACB */
27520 case MMI_OPC_0_PEXT5
: /* TODO: MMI_OPC_0_PEXT5 */
27521 case MMI_OPC_0_PPAC5
: /* TODO: MMI_OPC_0_PPAC5 */
27522 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI0 */
27525 MIPS_INVAL("TX79 MMI class MMI0");
27526 generate_exception_end(ctx
, EXCP_RI
);
27531 static void decode_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
27533 uint32_t opc
= MASK_MMI1(ctx
->opcode
);
27536 case MMI_OPC_1_PABSW
: /* TODO: MMI_OPC_1_PABSW */
27537 case MMI_OPC_1_PCEQW
: /* TODO: MMI_OPC_1_PCEQW */
27538 case MMI_OPC_1_PMINW
: /* TODO: MMI_OPC_1_PMINW */
27539 case MMI_OPC_1_PADSBH
: /* TODO: MMI_OPC_1_PADSBH */
27540 case MMI_OPC_1_PABSH
: /* TODO: MMI_OPC_1_PABSH */
27541 case MMI_OPC_1_PCEQH
: /* TODO: MMI_OPC_1_PCEQH */
27542 case MMI_OPC_1_PMINH
: /* TODO: MMI_OPC_1_PMINH */
27543 case MMI_OPC_1_PCEQB
: /* TODO: MMI_OPC_1_PCEQB */
27544 case MMI_OPC_1_PADDUW
: /* TODO: MMI_OPC_1_PADDUW */
27545 case MMI_OPC_1_PSUBUW
: /* TODO: MMI_OPC_1_PSUBUW */
27546 case MMI_OPC_1_PEXTUW
: /* TODO: MMI_OPC_1_PEXTUW */
27547 case MMI_OPC_1_PADDUH
: /* TODO: MMI_OPC_1_PADDUH */
27548 case MMI_OPC_1_PSUBUH
: /* TODO: MMI_OPC_1_PSUBUH */
27549 case MMI_OPC_1_PEXTUH
: /* TODO: MMI_OPC_1_PEXTUH */
27550 case MMI_OPC_1_PADDUB
: /* TODO: MMI_OPC_1_PADDUB */
27551 case MMI_OPC_1_PSUBUB
: /* TODO: MMI_OPC_1_PSUBUB */
27552 case MMI_OPC_1_PEXTUB
: /* TODO: MMI_OPC_1_PEXTUB */
27553 case MMI_OPC_1_QFSRV
: /* TODO: MMI_OPC_1_QFSRV */
27554 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI1 */
27557 MIPS_INVAL("TX79 MMI class MMI1");
27558 generate_exception_end(ctx
, EXCP_RI
);
27563 static void decode_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
27565 uint32_t opc
= MASK_MMI2(ctx
->opcode
);
27568 case MMI_OPC_2_PMADDW
: /* TODO: MMI_OPC_2_PMADDW */
27569 case MMI_OPC_2_PSLLVW
: /* TODO: MMI_OPC_2_PSLLVW */
27570 case MMI_OPC_2_PSRLVW
: /* TODO: MMI_OPC_2_PSRLVW */
27571 case MMI_OPC_2_PMSUBW
: /* TODO: MMI_OPC_2_PMSUBW */
27572 case MMI_OPC_2_PMFHI
: /* TODO: MMI_OPC_2_PMFHI */
27573 case MMI_OPC_2_PMFLO
: /* TODO: MMI_OPC_2_PMFLO */
27574 case MMI_OPC_2_PINTH
: /* TODO: MMI_OPC_2_PINTH */
27575 case MMI_OPC_2_PMULTW
: /* TODO: MMI_OPC_2_PMULTW */
27576 case MMI_OPC_2_PDIVW
: /* TODO: MMI_OPC_2_PDIVW */
27577 case MMI_OPC_2_PMADDH
: /* TODO: MMI_OPC_2_PMADDH */
27578 case MMI_OPC_2_PHMADH
: /* TODO: MMI_OPC_2_PHMADH */
27579 case MMI_OPC_2_PAND
: /* TODO: MMI_OPC_2_PAND */
27580 case MMI_OPC_2_PXOR
: /* TODO: MMI_OPC_2_PXOR */
27581 case MMI_OPC_2_PMSUBH
: /* TODO: MMI_OPC_2_PMSUBH */
27582 case MMI_OPC_2_PHMSBH
: /* TODO: MMI_OPC_2_PHMSBH */
27583 case MMI_OPC_2_PEXEH
: /* TODO: MMI_OPC_2_PEXEH */
27584 case MMI_OPC_2_PREVH
: /* TODO: MMI_OPC_2_PREVH */
27585 case MMI_OPC_2_PMULTH
: /* TODO: MMI_OPC_2_PMULTH */
27586 case MMI_OPC_2_PDIVBW
: /* TODO: MMI_OPC_2_PDIVBW */
27587 case MMI_OPC_2_PEXEW
: /* TODO: MMI_OPC_2_PEXEW */
27588 case MMI_OPC_2_PROT3W
: /* TODO: MMI_OPC_2_PROT3W */
27589 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI2 */
27591 case MMI_OPC_2_PCPYLD
:
27592 gen_mmi_pcpyld(ctx
);
27595 MIPS_INVAL("TX79 MMI class MMI2");
27596 generate_exception_end(ctx
, EXCP_RI
);
27601 static void decode_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
27603 uint32_t opc
= MASK_MMI3(ctx
->opcode
);
27606 case MMI_OPC_3_PMADDUW
: /* TODO: MMI_OPC_3_PMADDUW */
27607 case MMI_OPC_3_PSRAVW
: /* TODO: MMI_OPC_3_PSRAVW */
27608 case MMI_OPC_3_PMTHI
: /* TODO: MMI_OPC_3_PMTHI */
27609 case MMI_OPC_3_PMTLO
: /* TODO: MMI_OPC_3_PMTLO */
27610 case MMI_OPC_3_PINTEH
: /* TODO: MMI_OPC_3_PINTEH */
27611 case MMI_OPC_3_PMULTUW
: /* TODO: MMI_OPC_3_PMULTUW */
27612 case MMI_OPC_3_PDIVUW
: /* TODO: MMI_OPC_3_PDIVUW */
27613 case MMI_OPC_3_POR
: /* TODO: MMI_OPC_3_POR */
27614 case MMI_OPC_3_PNOR
: /* TODO: MMI_OPC_3_PNOR */
27615 case MMI_OPC_3_PEXCH
: /* TODO: MMI_OPC_3_PEXCH */
27616 case MMI_OPC_3_PEXCW
: /* TODO: MMI_OPC_3_PEXCW */
27617 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI3 */
27619 case MMI_OPC_3_PCPYH
:
27620 gen_mmi_pcpyh(ctx
);
27622 case MMI_OPC_3_PCPYUD
:
27623 gen_mmi_pcpyud(ctx
);
27626 MIPS_INVAL("TX79 MMI class MMI3");
27627 generate_exception_end(ctx
, EXCP_RI
);
27632 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
27634 uint32_t opc
= MASK_MMI(ctx
->opcode
);
27635 int rs
= extract32(ctx
->opcode
, 21, 5);
27636 int rt
= extract32(ctx
->opcode
, 16, 5);
27637 int rd
= extract32(ctx
->opcode
, 11, 5);
27640 case MMI_OPC_CLASS_MMI0
:
27641 decode_mmi0(env
, ctx
);
27643 case MMI_OPC_CLASS_MMI1
:
27644 decode_mmi1(env
, ctx
);
27646 case MMI_OPC_CLASS_MMI2
:
27647 decode_mmi2(env
, ctx
);
27649 case MMI_OPC_CLASS_MMI3
:
27650 decode_mmi3(env
, ctx
);
27652 case MMI_OPC_MULT1
:
27653 case MMI_OPC_MULTU1
:
27655 case MMI_OPC_MADDU
:
27656 case MMI_OPC_MADD1
:
27657 case MMI_OPC_MADDU1
:
27658 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
27661 case MMI_OPC_DIVU1
:
27662 gen_div1_tx79(ctx
, opc
, rs
, rt
);
27664 case MMI_OPC_MTLO1
:
27665 case MMI_OPC_MTHI1
:
27666 gen_HILO1_tx79(ctx
, opc
, rs
);
27668 case MMI_OPC_MFLO1
:
27669 case MMI_OPC_MFHI1
:
27670 gen_HILO1_tx79(ctx
, opc
, rd
);
27672 case MMI_OPC_PLZCW
: /* TODO: MMI_OPC_PLZCW */
27673 case MMI_OPC_PMFHL
: /* TODO: MMI_OPC_PMFHL */
27674 case MMI_OPC_PMTHL
: /* TODO: MMI_OPC_PMTHL */
27675 case MMI_OPC_PSLLH
: /* TODO: MMI_OPC_PSLLH */
27676 case MMI_OPC_PSRLH
: /* TODO: MMI_OPC_PSRLH */
27677 case MMI_OPC_PSRAH
: /* TODO: MMI_OPC_PSRAH */
27678 case MMI_OPC_PSLLW
: /* TODO: MMI_OPC_PSLLW */
27679 case MMI_OPC_PSRLW
: /* TODO: MMI_OPC_PSRLW */
27680 case MMI_OPC_PSRAW
: /* TODO: MMI_OPC_PSRAW */
27681 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI */
27684 MIPS_INVAL("TX79 MMI class");
27685 generate_exception_end(ctx
, EXCP_RI
);
27690 static void gen_mmi_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
27692 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_LQ */
27695 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
27697 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_SQ */
27701 * The TX79-specific instruction Store Quadword
27703 * +--------+-------+-------+------------------------+
27704 * | 011111 | base | rt | offset | SQ
27705 * +--------+-------+-------+------------------------+
27708 * has the same opcode as the Read Hardware Register instruction
27710 * +--------+-------+-------+-------+-------+--------+
27711 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
27712 * +--------+-------+-------+-------+-------+--------+
27715 * that is required, trapped and emulated by the Linux kernel. However, all
27716 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
27717 * offset is odd. Therefore all valid SQ instructions can execute normally.
27718 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
27719 * between SQ and RDHWR, as the Linux kernel does.
27721 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
27723 int base
= extract32(ctx
->opcode
, 21, 5);
27724 int rt
= extract32(ctx
->opcode
, 16, 5);
27725 int offset
= extract32(ctx
->opcode
, 0, 16);
27727 #ifdef CONFIG_USER_ONLY
27728 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
27729 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
27731 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
27732 int rd
= extract32(ctx
->opcode
, 11, 5);
27734 gen_rdhwr(ctx
, rt
, rd
, 0);
27739 gen_mmi_sq(ctx
, base
, rt
, offset
);
27744 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
27746 int rs
, rt
, rd
, sa
;
27750 rs
= (ctx
->opcode
>> 21) & 0x1f;
27751 rt
= (ctx
->opcode
>> 16) & 0x1f;
27752 rd
= (ctx
->opcode
>> 11) & 0x1f;
27753 sa
= (ctx
->opcode
>> 6) & 0x1f;
27754 imm
= sextract32(ctx
->opcode
, 7, 9);
27756 op1
= MASK_SPECIAL3(ctx
->opcode
);
27759 * EVA loads and stores overlap Loongson 2E instructions decoded by
27760 * decode_opc_special3_legacy(), so be careful to allow their decoding when
27767 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27775 check_cp0_enabled(ctx
);
27776 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27780 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27785 check_cp0_enabled(ctx
);
27786 gen_st(ctx
, op1
, rt
, rs
, imm
);
27789 check_cp0_enabled(ctx
);
27790 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, true);
27793 check_cp0_enabled(ctx
);
27794 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27795 gen_cache_operation(ctx
, rt
, rs
, imm
);
27797 /* Treat as NOP. */
27800 check_cp0_enabled(ctx
);
27801 /* Treat as NOP. */
27809 check_insn(ctx
, ISA_MIPS32R2
);
27810 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27813 op2
= MASK_BSHFL(ctx
->opcode
);
27820 check_insn(ctx
, ISA_MIPS32R6
);
27821 decode_opc_special3_r6(env
, ctx
);
27824 check_insn(ctx
, ISA_MIPS32R2
);
27825 gen_bshfl(ctx
, op2
, rt
, rd
);
27829 #if defined(TARGET_MIPS64)
27836 check_insn(ctx
, ISA_MIPS64R2
);
27837 check_mips_64(ctx
);
27838 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27841 op2
= MASK_DBSHFL(ctx
->opcode
);
27852 check_insn(ctx
, ISA_MIPS32R6
);
27853 decode_opc_special3_r6(env
, ctx
);
27856 check_insn(ctx
, ISA_MIPS64R2
);
27857 check_mips_64(ctx
);
27858 op2
= MASK_DBSHFL(ctx
->opcode
);
27859 gen_bshfl(ctx
, op2
, rt
, rd
);
27865 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
27870 TCGv t0
= tcg_temp_new();
27871 TCGv t1
= tcg_temp_new();
27873 gen_load_gpr(t0
, rt
);
27874 gen_load_gpr(t1
, rs
);
27875 gen_helper_fork(t0
, t1
);
27883 TCGv t0
= tcg_temp_new();
27885 gen_load_gpr(t0
, rs
);
27886 gen_helper_yield(t0
, cpu_env
, t0
);
27887 gen_store_gpr(t0
, rd
);
27892 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
27893 decode_opc_special3_r6(env
, ctx
);
27895 decode_opc_special3_legacy(env
, ctx
);
27900 /* MIPS SIMD Architecture (MSA) */
27901 static inline int check_msa_access(DisasContext
*ctx
)
27903 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
27904 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
27905 generate_exception_end(ctx
, EXCP_RI
);
27909 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
27910 if (ctx
->insn_flags
& ASE_MSA
) {
27911 generate_exception_end(ctx
, EXCP_MSADIS
);
27914 generate_exception_end(ctx
, EXCP_RI
);
27921 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
27923 /* generates tcg ops to check if any element is 0 */
27924 /* Note this function only works with MSA_WRLEN = 128 */
27925 uint64_t eval_zero_or_big
= 0;
27926 uint64_t eval_big
= 0;
27927 TCGv_i64 t0
= tcg_temp_new_i64();
27928 TCGv_i64 t1
= tcg_temp_new_i64();
27931 eval_zero_or_big
= 0x0101010101010101ULL
;
27932 eval_big
= 0x8080808080808080ULL
;
27935 eval_zero_or_big
= 0x0001000100010001ULL
;
27936 eval_big
= 0x8000800080008000ULL
;
27939 eval_zero_or_big
= 0x0000000100000001ULL
;
27940 eval_big
= 0x8000000080000000ULL
;
27943 eval_zero_or_big
= 0x0000000000000001ULL
;
27944 eval_big
= 0x8000000000000000ULL
;
27947 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<< 1], eval_zero_or_big
);
27948 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<< 1]);
27949 tcg_gen_andi_i64(t0
, t0
, eval_big
);
27950 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<< 1) + 1], eval_zero_or_big
);
27951 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<< 1) + 1]);
27952 tcg_gen_andi_i64(t1
, t1
, eval_big
);
27953 tcg_gen_or_i64(t0
, t0
, t1
);
27954 /* if all bits are zero then all elements are not zero */
27955 /* if some bit is non-zero then some element is zero */
27956 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
27957 tcg_gen_trunc_i64_tl(tresult
, t0
);
27958 tcg_temp_free_i64(t0
);
27959 tcg_temp_free_i64(t1
);
27962 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
27964 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
27965 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
27966 int64_t s16
= (int16_t)ctx
->opcode
;
27968 check_msa_access(ctx
);
27970 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
27971 generate_exception_end(ctx
, EXCP_RI
);
27978 TCGv_i64 t0
= tcg_temp_new_i64();
27979 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<< 1], msa_wr_d
[(wt
<< 1) + 1]);
27980 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
27981 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
27982 tcg_gen_trunc_i64_tl(bcond
, t0
);
27983 tcg_temp_free_i64(t0
);
27990 gen_check_zero_element(bcond
, df
, wt
);
27996 gen_check_zero_element(bcond
, df
, wt
);
27997 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
28001 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
28003 ctx
->hflags
|= MIPS_HFLAG_BC
;
28004 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
28007 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
28009 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
28010 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
28011 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28012 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28014 TCGv_i32 twd
= tcg_const_i32(wd
);
28015 TCGv_i32 tws
= tcg_const_i32(ws
);
28016 TCGv_i32 ti8
= tcg_const_i32(i8
);
28018 switch (MASK_MSA_I8(ctx
->opcode
)) {
28020 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
28023 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
28026 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
28029 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
28032 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
28035 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
28038 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
28044 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
28045 if (df
== DF_DOUBLE
) {
28046 generate_exception_end(ctx
, EXCP_RI
);
28048 TCGv_i32 tdf
= tcg_const_i32(df
);
28049 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
28050 tcg_temp_free_i32(tdf
);
28055 MIPS_INVAL("MSA instruction");
28056 generate_exception_end(ctx
, EXCP_RI
);
28060 tcg_temp_free_i32(twd
);
28061 tcg_temp_free_i32(tws
);
28062 tcg_temp_free_i32(ti8
);
28065 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
28067 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28068 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28069 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
28070 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
28071 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28072 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28074 TCGv_i32 tdf
= tcg_const_i32(df
);
28075 TCGv_i32 twd
= tcg_const_i32(wd
);
28076 TCGv_i32 tws
= tcg_const_i32(ws
);
28077 TCGv_i32 timm
= tcg_temp_new_i32();
28078 tcg_gen_movi_i32(timm
, u5
);
28080 switch (MASK_MSA_I5(ctx
->opcode
)) {
28082 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28085 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28087 case OPC_MAXI_S_df
:
28088 tcg_gen_movi_i32(timm
, s5
);
28089 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28091 case OPC_MAXI_U_df
:
28092 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28094 case OPC_MINI_S_df
:
28095 tcg_gen_movi_i32(timm
, s5
);
28096 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28098 case OPC_MINI_U_df
:
28099 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28102 tcg_gen_movi_i32(timm
, s5
);
28103 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28105 case OPC_CLTI_S_df
:
28106 tcg_gen_movi_i32(timm
, s5
);
28107 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28109 case OPC_CLTI_U_df
:
28110 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28112 case OPC_CLEI_S_df
:
28113 tcg_gen_movi_i32(timm
, s5
);
28114 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28116 case OPC_CLEI_U_df
:
28117 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28121 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
28122 tcg_gen_movi_i32(timm
, s10
);
28123 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
28127 MIPS_INVAL("MSA instruction");
28128 generate_exception_end(ctx
, EXCP_RI
);
28132 tcg_temp_free_i32(tdf
);
28133 tcg_temp_free_i32(twd
);
28134 tcg_temp_free_i32(tws
);
28135 tcg_temp_free_i32(timm
);
28138 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
28140 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28141 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
28142 uint32_t df
= 0, m
= 0;
28143 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28144 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28151 if ((dfm
& 0x40) == 0x00) {
28154 } else if ((dfm
& 0x60) == 0x40) {
28157 } else if ((dfm
& 0x70) == 0x60) {
28160 } else if ((dfm
& 0x78) == 0x70) {
28164 generate_exception_end(ctx
, EXCP_RI
);
28168 tdf
= tcg_const_i32(df
);
28169 tm
= tcg_const_i32(m
);
28170 twd
= tcg_const_i32(wd
);
28171 tws
= tcg_const_i32(ws
);
28173 switch (MASK_MSA_BIT(ctx
->opcode
)) {
28175 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28178 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
28181 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28184 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28187 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
28190 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
28192 case OPC_BINSLI_df
:
28193 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28195 case OPC_BINSRI_df
:
28196 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28199 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
28202 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
28205 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
28208 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28211 MIPS_INVAL("MSA instruction");
28212 generate_exception_end(ctx
, EXCP_RI
);
28216 tcg_temp_free_i32(tdf
);
28217 tcg_temp_free_i32(tm
);
28218 tcg_temp_free_i32(twd
);
28219 tcg_temp_free_i32(tws
);
28222 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
28224 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28225 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28226 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28227 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28228 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28230 TCGv_i32 tdf
= tcg_const_i32(df
);
28231 TCGv_i32 twd
= tcg_const_i32(wd
);
28232 TCGv_i32 tws
= tcg_const_i32(ws
);
28233 TCGv_i32 twt
= tcg_const_i32(wt
);
28235 switch (MASK_MSA_3R(ctx
->opcode
)) {
28237 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
28240 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28243 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28246 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28248 case OPC_SUBS_S_df
:
28249 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28252 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28255 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
28258 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28261 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
28264 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28266 case OPC_ADDS_A_df
:
28267 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28269 case OPC_SUBS_U_df
:
28270 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28273 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28276 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
28279 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
28282 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28285 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28288 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28290 case OPC_ADDS_S_df
:
28291 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28293 case OPC_SUBSUS_U_df
:
28294 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28297 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28300 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28303 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28306 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28309 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28312 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28314 case OPC_ADDS_U_df
:
28315 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28317 case OPC_SUBSUU_S_df
:
28318 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28321 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28324 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
28327 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28330 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28333 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28335 case OPC_ASUB_S_df
:
28336 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28339 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28342 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28345 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
28348 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28351 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28354 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28356 case OPC_ASUB_U_df
:
28357 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28360 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28363 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28366 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28369 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28371 case OPC_AVER_S_df
:
28372 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28375 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28378 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28381 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28384 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28386 case OPC_AVER_U_df
:
28387 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28390 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28393 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28396 case OPC_DOTP_S_df
:
28397 case OPC_DOTP_U_df
:
28398 case OPC_DPADD_S_df
:
28399 case OPC_DPADD_U_df
:
28400 case OPC_DPSUB_S_df
:
28401 case OPC_HADD_S_df
:
28402 case OPC_DPSUB_U_df
:
28403 case OPC_HADD_U_df
:
28404 case OPC_HSUB_S_df
:
28405 case OPC_HSUB_U_df
:
28406 if (df
== DF_BYTE
) {
28407 generate_exception_end(ctx
, EXCP_RI
);
28410 switch (MASK_MSA_3R(ctx
->opcode
)) {
28411 case OPC_DOTP_S_df
:
28412 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28414 case OPC_DOTP_U_df
:
28415 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28417 case OPC_DPADD_S_df
:
28418 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28420 case OPC_DPADD_U_df
:
28421 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28423 case OPC_DPSUB_S_df
:
28424 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28426 case OPC_HADD_S_df
:
28427 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28429 case OPC_DPSUB_U_df
:
28430 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28432 case OPC_HADD_U_df
:
28433 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28435 case OPC_HSUB_S_df
:
28436 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28438 case OPC_HSUB_U_df
:
28439 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28444 MIPS_INVAL("MSA instruction");
28445 generate_exception_end(ctx
, EXCP_RI
);
28448 tcg_temp_free_i32(twd
);
28449 tcg_temp_free_i32(tws
);
28450 tcg_temp_free_i32(twt
);
28451 tcg_temp_free_i32(tdf
);
28454 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
28456 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
28457 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
28458 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
28459 TCGv telm
= tcg_temp_new();
28460 TCGv_i32 tsr
= tcg_const_i32(source
);
28461 TCGv_i32 tdt
= tcg_const_i32(dest
);
28463 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
28465 gen_load_gpr(telm
, source
);
28466 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
28469 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
28470 gen_store_gpr(telm
, dest
);
28473 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
28476 MIPS_INVAL("MSA instruction");
28477 generate_exception_end(ctx
, EXCP_RI
);
28481 tcg_temp_free(telm
);
28482 tcg_temp_free_i32(tdt
);
28483 tcg_temp_free_i32(tsr
);
28486 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
28489 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28490 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28491 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28493 TCGv_i32 tws
= tcg_const_i32(ws
);
28494 TCGv_i32 twd
= tcg_const_i32(wd
);
28495 TCGv_i32 tn
= tcg_const_i32(n
);
28496 TCGv_i32 tdf
= tcg_const_i32(df
);
28498 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28500 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
28502 case OPC_SPLATI_df
:
28503 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
28506 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
28508 case OPC_COPY_S_df
:
28509 case OPC_COPY_U_df
:
28510 case OPC_INSERT_df
:
28511 #if !defined(TARGET_MIPS64)
28512 /* Double format valid only for MIPS64 */
28513 if (df
== DF_DOUBLE
) {
28514 generate_exception_end(ctx
, EXCP_RI
);
28517 if ((MASK_MSA_ELM(ctx
->opcode
) == OPC_COPY_U_df
) &&
28519 generate_exception_end(ctx
, EXCP_RI
);
28523 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28524 case OPC_COPY_S_df
:
28525 if (likely(wd
!= 0)) {
28528 gen_helper_msa_copy_s_b(cpu_env
, twd
, tws
, tn
);
28531 gen_helper_msa_copy_s_h(cpu_env
, twd
, tws
, tn
);
28534 gen_helper_msa_copy_s_w(cpu_env
, twd
, tws
, tn
);
28536 #if defined(TARGET_MIPS64)
28538 gen_helper_msa_copy_s_d(cpu_env
, twd
, tws
, tn
);
28546 case OPC_COPY_U_df
:
28547 if (likely(wd
!= 0)) {
28550 gen_helper_msa_copy_u_b(cpu_env
, twd
, tws
, tn
);
28553 gen_helper_msa_copy_u_h(cpu_env
, twd
, tws
, tn
);
28555 #if defined(TARGET_MIPS64)
28557 gen_helper_msa_copy_u_w(cpu_env
, twd
, tws
, tn
);
28565 case OPC_INSERT_df
:
28568 gen_helper_msa_insert_b(cpu_env
, twd
, tws
, tn
);
28571 gen_helper_msa_insert_h(cpu_env
, twd
, tws
, tn
);
28574 gen_helper_msa_insert_w(cpu_env
, twd
, tws
, tn
);
28576 #if defined(TARGET_MIPS64)
28578 gen_helper_msa_insert_d(cpu_env
, twd
, tws
, tn
);
28588 MIPS_INVAL("MSA instruction");
28589 generate_exception_end(ctx
, EXCP_RI
);
28591 tcg_temp_free_i32(twd
);
28592 tcg_temp_free_i32(tws
);
28593 tcg_temp_free_i32(tn
);
28594 tcg_temp_free_i32(tdf
);
28597 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
28599 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
28600 uint32_t df
= 0, n
= 0;
28602 if ((dfn
& 0x30) == 0x00) {
28605 } else if ((dfn
& 0x38) == 0x20) {
28608 } else if ((dfn
& 0x3c) == 0x30) {
28611 } else if ((dfn
& 0x3e) == 0x38) {
28614 } else if (dfn
== 0x3E) {
28615 /* CTCMSA, CFCMSA, MOVE.V */
28616 gen_msa_elm_3e(env
, ctx
);
28619 generate_exception_end(ctx
, EXCP_RI
);
28623 gen_msa_elm_df(env
, ctx
, df
, n
);
28626 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28628 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28629 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
28630 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28631 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28632 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28634 TCGv_i32 twd
= tcg_const_i32(wd
);
28635 TCGv_i32 tws
= tcg_const_i32(ws
);
28636 TCGv_i32 twt
= tcg_const_i32(wt
);
28637 TCGv_i32 tdf
= tcg_temp_new_i32();
28639 /* adjust df value for floating-point instruction */
28640 tcg_gen_movi_i32(tdf
, df
+ 2);
28642 switch (MASK_MSA_3RF(ctx
->opcode
)) {
28644 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28647 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28650 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28653 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28656 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28659 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28662 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
28665 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28668 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28671 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28674 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28677 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28680 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28683 tcg_gen_movi_i32(tdf
, df
+ 1);
28684 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28687 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28690 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28692 case OPC_MADD_Q_df
:
28693 tcg_gen_movi_i32(tdf
, df
+ 1);
28694 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28697 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28699 case OPC_MSUB_Q_df
:
28700 tcg_gen_movi_i32(tdf
, df
+ 1);
28701 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28704 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28707 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
28710 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28713 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
28716 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28719 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28722 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28725 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28728 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28731 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28734 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28737 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28740 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
28742 case OPC_MULR_Q_df
:
28743 tcg_gen_movi_i32(tdf
, df
+ 1);
28744 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28747 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28749 case OPC_FMIN_A_df
:
28750 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28752 case OPC_MADDR_Q_df
:
28753 tcg_gen_movi_i32(tdf
, df
+ 1);
28754 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28757 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28760 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
28762 case OPC_MSUBR_Q_df
:
28763 tcg_gen_movi_i32(tdf
, df
+ 1);
28764 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28767 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28769 case OPC_FMAX_A_df
:
28770 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28773 MIPS_INVAL("MSA instruction");
28774 generate_exception_end(ctx
, EXCP_RI
);
28778 tcg_temp_free_i32(twd
);
28779 tcg_temp_free_i32(tws
);
28780 tcg_temp_free_i32(twt
);
28781 tcg_temp_free_i32(tdf
);
28784 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
28786 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28787 (op & (0x7 << 18)))
28788 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28789 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28790 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28791 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
28792 TCGv_i32 twd
= tcg_const_i32(wd
);
28793 TCGv_i32 tws
= tcg_const_i32(ws
);
28794 TCGv_i32 twt
= tcg_const_i32(wt
);
28795 TCGv_i32 tdf
= tcg_const_i32(df
);
28797 switch (MASK_MSA_2R(ctx
->opcode
)) {
28799 #if !defined(TARGET_MIPS64)
28800 /* Double format valid only for MIPS64 */
28801 if (df
== DF_DOUBLE
) {
28802 generate_exception_end(ctx
, EXCP_RI
);
28806 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
28809 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
28812 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
28815 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
28818 MIPS_INVAL("MSA instruction");
28819 generate_exception_end(ctx
, EXCP_RI
);
28823 tcg_temp_free_i32(twd
);
28824 tcg_temp_free_i32(tws
);
28825 tcg_temp_free_i32(twt
);
28826 tcg_temp_free_i32(tdf
);
28829 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28831 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28832 (op & (0xf << 17)))
28833 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28834 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28835 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28836 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
28837 TCGv_i32 twd
= tcg_const_i32(wd
);
28838 TCGv_i32 tws
= tcg_const_i32(ws
);
28839 TCGv_i32 twt
= tcg_const_i32(wt
);
28840 /* adjust df value for floating-point instruction */
28841 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
28843 switch (MASK_MSA_2RF(ctx
->opcode
)) {
28844 case OPC_FCLASS_df
:
28845 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
28847 case OPC_FTRUNC_S_df
:
28848 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
28850 case OPC_FTRUNC_U_df
:
28851 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
28854 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
28856 case OPC_FRSQRT_df
:
28857 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
28860 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
28863 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
28866 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
28868 case OPC_FEXUPL_df
:
28869 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
28871 case OPC_FEXUPR_df
:
28872 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
28875 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
28878 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
28880 case OPC_FTINT_S_df
:
28881 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
28883 case OPC_FTINT_U_df
:
28884 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
28886 case OPC_FFINT_S_df
:
28887 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
28889 case OPC_FFINT_U_df
:
28890 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
28894 tcg_temp_free_i32(twd
);
28895 tcg_temp_free_i32(tws
);
28896 tcg_temp_free_i32(twt
);
28897 tcg_temp_free_i32(tdf
);
28900 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
28902 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
28903 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28904 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28905 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28906 TCGv_i32 twd
= tcg_const_i32(wd
);
28907 TCGv_i32 tws
= tcg_const_i32(ws
);
28908 TCGv_i32 twt
= tcg_const_i32(wt
);
28910 switch (MASK_MSA_VEC(ctx
->opcode
)) {
28912 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
28915 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
28918 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
28921 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
28924 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
28927 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
28930 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
28933 MIPS_INVAL("MSA instruction");
28934 generate_exception_end(ctx
, EXCP_RI
);
28938 tcg_temp_free_i32(twd
);
28939 tcg_temp_free_i32(tws
);
28940 tcg_temp_free_i32(twt
);
28943 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
28945 switch (MASK_MSA_VEC(ctx
->opcode
)) {
28953 gen_msa_vec_v(env
, ctx
);
28956 gen_msa_2r(env
, ctx
);
28959 gen_msa_2rf(env
, ctx
);
28962 MIPS_INVAL("MSA instruction");
28963 generate_exception_end(ctx
, EXCP_RI
);
28968 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
28970 uint32_t opcode
= ctx
->opcode
;
28971 check_insn(ctx
, ASE_MSA
);
28972 check_msa_access(ctx
);
28974 switch (MASK_MSA_MINOR(opcode
)) {
28975 case OPC_MSA_I8_00
:
28976 case OPC_MSA_I8_01
:
28977 case OPC_MSA_I8_02
:
28978 gen_msa_i8(env
, ctx
);
28980 case OPC_MSA_I5_06
:
28981 case OPC_MSA_I5_07
:
28982 gen_msa_i5(env
, ctx
);
28984 case OPC_MSA_BIT_09
:
28985 case OPC_MSA_BIT_0A
:
28986 gen_msa_bit(env
, ctx
);
28988 case OPC_MSA_3R_0D
:
28989 case OPC_MSA_3R_0E
:
28990 case OPC_MSA_3R_0F
:
28991 case OPC_MSA_3R_10
:
28992 case OPC_MSA_3R_11
:
28993 case OPC_MSA_3R_12
:
28994 case OPC_MSA_3R_13
:
28995 case OPC_MSA_3R_14
:
28996 case OPC_MSA_3R_15
:
28997 gen_msa_3r(env
, ctx
);
29000 gen_msa_elm(env
, ctx
);
29002 case OPC_MSA_3RF_1A
:
29003 case OPC_MSA_3RF_1B
:
29004 case OPC_MSA_3RF_1C
:
29005 gen_msa_3rf(env
, ctx
);
29008 gen_msa_vec(env
, ctx
);
29019 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
29020 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
29021 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
29022 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
29024 TCGv_i32 twd
= tcg_const_i32(wd
);
29025 TCGv taddr
= tcg_temp_new();
29026 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
29028 switch (MASK_MSA_MINOR(opcode
)) {
29030 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
29033 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
29036 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
29039 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
29042 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
29045 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
29048 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
29051 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
29055 tcg_temp_free_i32(twd
);
29056 tcg_temp_free(taddr
);
29060 MIPS_INVAL("MSA instruction");
29061 generate_exception_end(ctx
, EXCP_RI
);
29067 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
29070 int rs
, rt
, rd
, sa
;
29074 /* make sure instructions are on a word boundary */
29075 if (ctx
->base
.pc_next
& 0x3) {
29076 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
29077 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
29081 /* Handle blikely not taken case */
29082 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
29083 TCGLabel
*l1
= gen_new_label();
29085 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
29086 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
29087 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
29091 op
= MASK_OP_MAJOR(ctx
->opcode
);
29092 rs
= (ctx
->opcode
>> 21) & 0x1f;
29093 rt
= (ctx
->opcode
>> 16) & 0x1f;
29094 rd
= (ctx
->opcode
>> 11) & 0x1f;
29095 sa
= (ctx
->opcode
>> 6) & 0x1f;
29096 imm
= (int16_t)ctx
->opcode
;
29099 decode_opc_special(env
, ctx
);
29102 #if defined(TARGET_MIPS64)
29103 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
29104 decode_mmi(env
, ctx
);
29106 if (ctx
->insn_flags
& ASE_MXU
) {
29107 decode_opc_mxu(env
, ctx
);
29110 decode_opc_special2_legacy(env
, ctx
);
29114 #if defined(TARGET_MIPS64)
29115 if (ctx
->insn_flags
& INSN_R5900
) {
29116 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
29118 decode_opc_special3(env
, ctx
);
29121 decode_opc_special3(env
, ctx
);
29125 op1
= MASK_REGIMM(ctx
->opcode
);
29127 case OPC_BLTZL
: /* REGIMM branches */
29131 check_insn(ctx
, ISA_MIPS2
);
29132 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29136 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
29140 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29142 /* OPC_NAL, OPC_BAL */
29143 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
29145 generate_exception_end(ctx
, EXCP_RI
);
29148 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
29151 case OPC_TGEI
: /* REGIMM traps */
29158 check_insn(ctx
, ISA_MIPS2
);
29159 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29160 gen_trap(ctx
, op1
, rs
, -1, imm
);
29163 check_insn(ctx
, ISA_MIPS32R6
);
29164 generate_exception_end(ctx
, EXCP_RI
);
29167 check_insn(ctx
, ISA_MIPS32R2
);
29168 /* Break the TB to be able to sync copied instructions
29170 ctx
->base
.is_jmp
= DISAS_STOP
;
29172 case OPC_BPOSGE32
: /* MIPS DSP branch */
29173 #if defined(TARGET_MIPS64)
29177 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
29179 #if defined(TARGET_MIPS64)
29181 check_insn(ctx
, ISA_MIPS32R6
);
29182 check_mips_64(ctx
);
29184 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
29188 check_insn(ctx
, ISA_MIPS32R6
);
29189 check_mips_64(ctx
);
29191 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
29195 default: /* Invalid */
29196 MIPS_INVAL("regimm");
29197 generate_exception_end(ctx
, EXCP_RI
);
29202 check_cp0_enabled(ctx
);
29203 op1
= MASK_CP0(ctx
->opcode
);
29211 #if defined(TARGET_MIPS64)
29215 #ifndef CONFIG_USER_ONLY
29216 gen_cp0(env
, ctx
, op1
, rt
, rd
);
29217 #endif /* !CONFIG_USER_ONLY */
29235 #ifndef CONFIG_USER_ONLY
29236 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
29237 #endif /* !CONFIG_USER_ONLY */
29240 #ifndef CONFIG_USER_ONLY
29243 TCGv t0
= tcg_temp_new();
29245 op2
= MASK_MFMC0(ctx
->opcode
);
29249 gen_helper_dmt(t0
);
29250 gen_store_gpr(t0
, rt
);
29254 gen_helper_emt(t0
);
29255 gen_store_gpr(t0
, rt
);
29259 gen_helper_dvpe(t0
, cpu_env
);
29260 gen_store_gpr(t0
, rt
);
29264 gen_helper_evpe(t0
, cpu_env
);
29265 gen_store_gpr(t0
, rt
);
29268 check_insn(ctx
, ISA_MIPS32R6
);
29270 gen_helper_dvp(t0
, cpu_env
);
29271 gen_store_gpr(t0
, rt
);
29275 check_insn(ctx
, ISA_MIPS32R6
);
29277 gen_helper_evp(t0
, cpu_env
);
29278 gen_store_gpr(t0
, rt
);
29282 check_insn(ctx
, ISA_MIPS32R2
);
29283 save_cpu_state(ctx
, 1);
29284 gen_helper_di(t0
, cpu_env
);
29285 gen_store_gpr(t0
, rt
);
29286 /* Stop translation as we may have switched
29287 the execution mode. */
29288 ctx
->base
.is_jmp
= DISAS_STOP
;
29291 check_insn(ctx
, ISA_MIPS32R2
);
29292 save_cpu_state(ctx
, 1);
29293 gen_helper_ei(t0
, cpu_env
);
29294 gen_store_gpr(t0
, rt
);
29295 /* DISAS_STOP isn't sufficient, we need to ensure we break
29296 out of translated code to check for pending interrupts */
29297 gen_save_pc(ctx
->base
.pc_next
+ 4);
29298 ctx
->base
.is_jmp
= DISAS_EXIT
;
29300 default: /* Invalid */
29301 MIPS_INVAL("mfmc0");
29302 generate_exception_end(ctx
, EXCP_RI
);
29307 #endif /* !CONFIG_USER_ONLY */
29310 check_insn(ctx
, ISA_MIPS32R2
);
29311 gen_load_srsgpr(rt
, rd
);
29314 check_insn(ctx
, ISA_MIPS32R2
);
29315 gen_store_srsgpr(rt
, rd
);
29319 generate_exception_end(ctx
, EXCP_RI
);
29323 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
29324 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29325 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
29326 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29329 /* Arithmetic with immediate opcode */
29330 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29334 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29336 case OPC_SLTI
: /* Set on less than with immediate opcode */
29338 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
29340 case OPC_ANDI
: /* Arithmetic with immediate opcode */
29341 case OPC_LUI
: /* OPC_AUI */
29344 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
29346 case OPC_J
: /* Jump */
29348 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29349 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29352 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
29353 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29355 generate_exception_end(ctx
, EXCP_RI
);
29358 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
29359 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29362 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29365 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
29366 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29368 generate_exception_end(ctx
, EXCP_RI
);
29371 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
29372 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29375 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29378 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
29381 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29383 check_insn(ctx
, ISA_MIPS32R6
);
29384 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
29385 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29388 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
29391 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29393 check_insn(ctx
, ISA_MIPS32R6
);
29394 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
29395 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29400 check_insn(ctx
, ISA_MIPS2
);
29401 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29405 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29407 case OPC_LL
: /* Load and stores */
29408 check_insn(ctx
, ISA_MIPS2
);
29409 if (ctx
->insn_flags
& INSN_R5900
) {
29410 check_insn_opc_user_only(ctx
, INSN_R5900
);
29415 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29423 gen_ld(ctx
, op
, rt
, rs
, imm
);
29427 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29432 gen_st(ctx
, op
, rt
, rs
, imm
);
29435 check_insn(ctx
, ISA_MIPS2
);
29436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29437 if (ctx
->insn_flags
& INSN_R5900
) {
29438 check_insn_opc_user_only(ctx
, INSN_R5900
);
29440 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
29443 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29444 check_cp0_enabled(ctx
);
29445 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
29446 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
29447 gen_cache_operation(ctx
, rt
, rs
, imm
);
29449 /* Treat as NOP. */
29452 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29453 if (ctx
->insn_flags
& INSN_R5900
) {
29454 /* Treat as NOP. */
29456 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
29457 /* Treat as NOP. */
29461 /* Floating point (COP1). */
29466 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
29470 op1
= MASK_CP1(ctx
->opcode
);
29475 check_cp1_enabled(ctx
);
29476 check_insn(ctx
, ISA_MIPS32R2
);
29482 check_cp1_enabled(ctx
);
29483 gen_cp1(ctx
, op1
, rt
, rd
);
29485 #if defined(TARGET_MIPS64)
29488 check_cp1_enabled(ctx
);
29489 check_insn(ctx
, ISA_MIPS3
);
29490 check_mips_64(ctx
);
29491 gen_cp1(ctx
, op1
, rt
, rd
);
29494 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
29495 check_cp1_enabled(ctx
);
29496 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29498 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29503 check_insn(ctx
, ASE_MIPS3D
);
29504 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29505 (rt
>> 2) & 0x7, imm
<< 2);
29509 check_cp1_enabled(ctx
);
29510 check_insn(ctx
, ISA_MIPS32R6
);
29511 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29515 check_cp1_enabled(ctx
);
29516 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29518 check_insn(ctx
, ASE_MIPS3D
);
29521 check_cp1_enabled(ctx
);
29522 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29523 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29524 (rt
>> 2) & 0x7, imm
<< 2);
29531 check_cp1_enabled(ctx
);
29532 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29538 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
29539 check_cp1_enabled(ctx
);
29540 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29542 case R6_OPC_CMP_AF_S
:
29543 case R6_OPC_CMP_UN_S
:
29544 case R6_OPC_CMP_EQ_S
:
29545 case R6_OPC_CMP_UEQ_S
:
29546 case R6_OPC_CMP_LT_S
:
29547 case R6_OPC_CMP_ULT_S
:
29548 case R6_OPC_CMP_LE_S
:
29549 case R6_OPC_CMP_ULE_S
:
29550 case R6_OPC_CMP_SAF_S
:
29551 case R6_OPC_CMP_SUN_S
:
29552 case R6_OPC_CMP_SEQ_S
:
29553 case R6_OPC_CMP_SEUQ_S
:
29554 case R6_OPC_CMP_SLT_S
:
29555 case R6_OPC_CMP_SULT_S
:
29556 case R6_OPC_CMP_SLE_S
:
29557 case R6_OPC_CMP_SULE_S
:
29558 case R6_OPC_CMP_OR_S
:
29559 case R6_OPC_CMP_UNE_S
:
29560 case R6_OPC_CMP_NE_S
:
29561 case R6_OPC_CMP_SOR_S
:
29562 case R6_OPC_CMP_SUNE_S
:
29563 case R6_OPC_CMP_SNE_S
:
29564 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29566 case R6_OPC_CMP_AF_D
:
29567 case R6_OPC_CMP_UN_D
:
29568 case R6_OPC_CMP_EQ_D
:
29569 case R6_OPC_CMP_UEQ_D
:
29570 case R6_OPC_CMP_LT_D
:
29571 case R6_OPC_CMP_ULT_D
:
29572 case R6_OPC_CMP_LE_D
:
29573 case R6_OPC_CMP_ULE_D
:
29574 case R6_OPC_CMP_SAF_D
:
29575 case R6_OPC_CMP_SUN_D
:
29576 case R6_OPC_CMP_SEQ_D
:
29577 case R6_OPC_CMP_SEUQ_D
:
29578 case R6_OPC_CMP_SLT_D
:
29579 case R6_OPC_CMP_SULT_D
:
29580 case R6_OPC_CMP_SLE_D
:
29581 case R6_OPC_CMP_SULE_D
:
29582 case R6_OPC_CMP_OR_D
:
29583 case R6_OPC_CMP_UNE_D
:
29584 case R6_OPC_CMP_NE_D
:
29585 case R6_OPC_CMP_SOR_D
:
29586 case R6_OPC_CMP_SUNE_D
:
29587 case R6_OPC_CMP_SNE_D
:
29588 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29591 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
29592 rt
, rd
, sa
, (imm
>> 8) & 0x7);
29597 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29612 check_insn(ctx
, ASE_MSA
);
29613 gen_msa_branch(env
, ctx
, op1
);
29617 generate_exception_end(ctx
, EXCP_RI
);
29622 /* Compact branches [R6] and COP2 [non-R6] */
29623 case OPC_BC
: /* OPC_LWC2 */
29624 case OPC_BALC
: /* OPC_SWC2 */
29625 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29626 /* OPC_BC, OPC_BALC */
29627 gen_compute_compact_branch(ctx
, op
, 0, 0,
29628 sextract32(ctx
->opcode
<< 2, 0, 28));
29630 /* OPC_LWC2, OPC_SWC2 */
29631 /* COP2: Not implemented. */
29632 generate_exception_err(ctx
, EXCP_CpU
, 2);
29635 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
29636 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
29637 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29639 /* OPC_BEQZC, OPC_BNEZC */
29640 gen_compute_compact_branch(ctx
, op
, rs
, 0,
29641 sextract32(ctx
->opcode
<< 2, 0, 23));
29643 /* OPC_JIC, OPC_JIALC */
29644 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
29647 /* OPC_LWC2, OPC_SWC2 */
29648 /* COP2: Not implemented. */
29649 generate_exception_err(ctx
, EXCP_CpU
, 2);
29653 check_insn(ctx
, INSN_LOONGSON2F
);
29654 /* Note that these instructions use different fields. */
29655 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
29659 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29660 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
29661 check_cp1_enabled(ctx
);
29662 op1
= MASK_CP3(ctx
->opcode
);
29666 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29672 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29673 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
29676 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29677 /* Treat as NOP. */
29680 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29694 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29695 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
29699 generate_exception_end(ctx
, EXCP_RI
);
29703 generate_exception_err(ctx
, EXCP_CpU
, 1);
29707 #if defined(TARGET_MIPS64)
29708 /* MIPS64 opcodes */
29710 if (ctx
->insn_flags
& INSN_R5900
) {
29711 check_insn_opc_user_only(ctx
, INSN_R5900
);
29716 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29720 check_insn(ctx
, ISA_MIPS3
);
29721 check_mips_64(ctx
);
29722 gen_ld(ctx
, op
, rt
, rs
, imm
);
29726 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29729 check_insn(ctx
, ISA_MIPS3
);
29730 check_mips_64(ctx
);
29731 gen_st(ctx
, op
, rt
, rs
, imm
);
29734 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29735 check_insn(ctx
, ISA_MIPS3
);
29736 if (ctx
->insn_flags
& INSN_R5900
) {
29737 check_insn_opc_user_only(ctx
, INSN_R5900
);
29739 check_mips_64(ctx
);
29740 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEQ
, false);
29742 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
29743 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29744 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
29745 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29748 check_insn(ctx
, ISA_MIPS3
);
29749 check_mips_64(ctx
);
29750 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29754 check_insn(ctx
, ISA_MIPS3
);
29755 check_mips_64(ctx
);
29756 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29759 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
29760 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29761 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29763 MIPS_INVAL("major opcode");
29764 generate_exception_end(ctx
, EXCP_RI
);
29768 case OPC_DAUI
: /* OPC_JALX */
29769 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29770 #if defined(TARGET_MIPS64)
29772 check_mips_64(ctx
);
29774 generate_exception(ctx
, EXCP_RI
);
29775 } else if (rt
!= 0) {
29776 TCGv t0
= tcg_temp_new();
29777 gen_load_gpr(t0
, rs
);
29778 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
29782 generate_exception_end(ctx
, EXCP_RI
);
29783 MIPS_INVAL("major opcode");
29787 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
29788 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29789 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29792 case OPC_MSA
: /* OPC_MDMX */
29793 if (ctx
->insn_flags
& INSN_R5900
) {
29794 #if defined(TARGET_MIPS64)
29795 gen_mmi_lq(env
, ctx
); /* MMI_OPC_LQ */
29798 /* MDMX: Not implemented. */
29803 check_insn(ctx
, ISA_MIPS32R6
);
29804 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
29806 default: /* Invalid */
29807 MIPS_INVAL("major opcode");
29808 generate_exception_end(ctx
, EXCP_RI
);
29813 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
29815 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29816 CPUMIPSState
*env
= cs
->env_ptr
;
29818 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
29819 ctx
->saved_pc
= -1;
29820 ctx
->insn_flags
= env
->insn_flags
;
29821 ctx
->CP0_Config1
= env
->CP0_Config1
;
29822 ctx
->CP0_Config2
= env
->CP0_Config2
;
29823 ctx
->CP0_Config3
= env
->CP0_Config3
;
29824 ctx
->CP0_Config5
= env
->CP0_Config5
;
29826 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
29827 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
29828 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
29829 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
29830 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
29831 ctx
->PAMask
= env
->PAMask
;
29832 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
29833 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
29834 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
29835 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
29836 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
29837 /* Restore delay slot state from the tb context. */
29838 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
29839 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
29840 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
29841 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
29842 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
29843 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
29844 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
29845 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
29846 restore_cpu_state(env
, ctx
);
29847 #ifdef CONFIG_USER_ONLY
29848 ctx
->mem_idx
= MIPS_HFLAG_UM
;
29850 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
29852 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& ISA_MIPS32R6
) ?
29853 MO_UNALN
: MO_ALIGN
;
29855 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
29859 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29863 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
29865 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29867 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
29871 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
29872 const CPUBreakpoint
*bp
)
29874 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29876 save_cpu_state(ctx
, 1);
29877 ctx
->base
.is_jmp
= DISAS_NORETURN
;
29878 gen_helper_raise_exception_debug(cpu_env
);
29879 /* The address covered by the breakpoint must be included in
29880 [tb->pc, tb->pc + tb->size) in order to for it to be
29881 properly cleared -- thus we increment the PC here so that
29882 the logic setting tb->size below does the right thing. */
29883 ctx
->base
.pc_next
+= 4;
29887 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
29889 CPUMIPSState
*env
= cs
->env_ptr
;
29890 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29894 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
29895 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
29896 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29897 insn_bytes
= decode_nanomips_opc(env
, ctx
);
29898 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
29899 ctx
->opcode
= cpu_ldl_code(env
, ctx
->base
.pc_next
);
29901 decode_opc(env
, ctx
);
29902 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
29903 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29904 insn_bytes
= decode_micromips_opc(env
, ctx
);
29905 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
29906 ctx
->opcode
= cpu_lduw_code(env
, ctx
->base
.pc_next
);
29907 insn_bytes
= decode_mips16_opc(env
, ctx
);
29909 generate_exception_end(ctx
, EXCP_RI
);
29910 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
29914 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
29915 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
29916 MIPS_HFLAG_FBNSLOT
))) {
29917 /* force to generate branch as there is neither delay nor
29921 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
29922 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
29923 /* Force to generate branch as microMIPS R6 doesn't restrict
29924 branches in the forbidden slot. */
29929 gen_branch(ctx
, insn_bytes
);
29931 ctx
->base
.pc_next
+= insn_bytes
;
29933 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
29936 /* Execute a branch and its delay slot as a single instruction.
29937 This is what GDB expects and is consistent with what the
29938 hardware does (e.g. if a delay slot instruction faults, the
29939 reported PC is the PC of the branch). */
29940 if (ctx
->base
.singlestep_enabled
&&
29941 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
29942 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
29944 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
29945 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
29949 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
29951 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
29953 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
29954 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
29955 gen_helper_raise_exception_debug(cpu_env
);
29957 switch (ctx
->base
.is_jmp
) {
29959 gen_save_pc(ctx
->base
.pc_next
);
29960 tcg_gen_lookup_and_goto_ptr();
29963 case DISAS_TOO_MANY
:
29964 save_cpu_state(ctx
, 0);
29965 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
29968 tcg_gen_exit_tb(NULL
, 0);
29970 case DISAS_NORETURN
:
29973 g_assert_not_reached();
29978 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
29980 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
29981 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
29984 static const TranslatorOps mips_tr_ops
= {
29985 .init_disas_context
= mips_tr_init_disas_context
,
29986 .tb_start
= mips_tr_tb_start
,
29987 .insn_start
= mips_tr_insn_start
,
29988 .breakpoint_check
= mips_tr_breakpoint_check
,
29989 .translate_insn
= mips_tr_translate_insn
,
29990 .tb_stop
= mips_tr_tb_stop
,
29991 .disas_log
= mips_tr_disas_log
,
29994 void gen_intermediate_code(CPUState
*cs
, TranslationBlock
*tb
, int max_insns
)
29998 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
, max_insns
);
30001 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, int flags
)
30004 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
30006 #define printfpr(fp) \
30009 qemu_fprintf(f, "w:%08x d:%016" PRIx64 \
30010 " fd:%13g fs:%13g psu: %13g\n", \
30011 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
30012 (double)(fp)->fd, \
30013 (double)(fp)->fs[FP_ENDIAN_IDX], \
30014 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
30017 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
30018 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
30019 qemu_fprintf(f, "w:%08x d:%016" PRIx64 \
30020 " fd:%13g fs:%13g psu:%13g\n", \
30021 tmp.w[FP_ENDIAN_IDX], tmp.d, \
30023 (double)tmp.fs[FP_ENDIAN_IDX], \
30024 (double)tmp.fs[!FP_ENDIAN_IDX]); \
30030 "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
30031 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
30032 get_float_exception_flags(&env
->active_fpu
.fp_status
));
30033 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
30034 qemu_fprintf(f
, "%3s: ", fregnames
[i
]);
30035 printfpr(&env
->active_fpu
.fpr
[i
]);
30041 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, int flags
)
30043 MIPSCPU
*cpu
= MIPS_CPU(cs
);
30044 CPUMIPSState
*env
= &cpu
->env
;
30047 qemu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
30048 " LO=0x" TARGET_FMT_lx
" ds %04x "
30049 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
30050 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
30051 env
->hflags
, env
->btarget
, env
->bcond
);
30052 for (i
= 0; i
< 32; i
++) {
30053 if ((i
& 3) == 0) {
30054 qemu_fprintf(f
, "GPR%02d:", i
);
30056 qemu_fprintf(f
, " %s " TARGET_FMT_lx
,
30057 regnames
[i
], env
->active_tc
.gpr
[i
]);
30058 if ((i
& 3) == 3) {
30059 qemu_fprintf(f
, "\n");
30063 qemu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
30064 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
30065 qemu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
30067 env
->CP0_Config0
, env
->CP0_Config1
, env
->CP0_LLAddr
);
30068 qemu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
30069 env
->CP0_Config2
, env
->CP0_Config3
);
30070 qemu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
30071 env
->CP0_Config4
, env
->CP0_Config5
);
30072 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
30073 fpu_dump_state(env
, f
, flags
);
30077 void mips_tcg_init(void)
30082 for (i
= 1; i
< 32; i
++)
30083 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
30084 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
30087 for (i
= 0; i
< 32; i
++) {
30088 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
30090 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2]);
30091 /* The scalar floating-point unit (FPU) registers are mapped on
30092 * the MSA vector registers. */
30093 fpu_f64
[i
] = msa_wr_d
[i
* 2];
30094 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
30095 msa_wr_d
[i
* 2 + 1] =
30096 tcg_global_mem_new_i64(cpu_env
, off
, msaregnames
[i
* 2 + 1]);
30099 cpu_PC
= tcg_global_mem_new(cpu_env
,
30100 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
30101 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
30102 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
30103 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
30105 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
30106 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
30109 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
30110 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
30112 bcond
= tcg_global_mem_new(cpu_env
,
30113 offsetof(CPUMIPSState
, bcond
), "bcond");
30114 btarget
= tcg_global_mem_new(cpu_env
,
30115 offsetof(CPUMIPSState
, btarget
), "btarget");
30116 hflags
= tcg_global_mem_new_i32(cpu_env
,
30117 offsetof(CPUMIPSState
, hflags
), "hflags");
30119 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
30120 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
30122 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
30123 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
30125 cpu_lladdr
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, lladdr
),
30127 cpu_llval
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, llval
),
30130 #if defined(TARGET_MIPS64)
30132 for (i
= 1; i
< 32; i
++) {
30133 cpu_mmr
[i
] = tcg_global_mem_new_i64(cpu_env
,
30134 offsetof(CPUMIPSState
,
30140 #if !defined(TARGET_MIPS64)
30141 for (i
= 0; i
< NUMBER_OF_MXU_REGISTERS
- 1; i
++) {
30142 mxu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
30143 offsetof(CPUMIPSState
,
30144 active_tc
.mxu_gpr
[i
]),
30148 mxu_CR
= tcg_global_mem_new(cpu_env
,
30149 offsetof(CPUMIPSState
, active_tc
.mxu_cr
),
30150 mxuregnames
[NUMBER_OF_MXU_REGISTERS
- 1]);
30154 #include "translate_init.inc.c"
30156 void cpu_mips_realize_env(CPUMIPSState
*env
)
30158 env
->exception_base
= (int32_t)0xBFC00000;
30160 #ifndef CONFIG_USER_ONLY
30161 mmu_init(env
, env
->cpu_model
);
30163 fpu_init(env
, env
->cpu_model
);
30164 mvp_init(env
, env
->cpu_model
);
30167 bool cpu_supports_cps_smp(const char *cpu_type
)
30169 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
30170 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
30173 bool cpu_supports_isa(const char *cpu_type
, uint64_t isa
)
30175 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
30176 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
30179 void cpu_set_exception_base(int vp_index
, target_ulong address
)
30181 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
30182 vp
->env
.exception_base
= address
;
30185 void cpu_state_reset(CPUMIPSState
*env
)
30187 CPUState
*cs
= env_cpu(env
);
30189 /* Reset registers to their default values */
30190 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
30191 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
30192 #ifdef TARGET_WORDS_BIGENDIAN
30193 env
->CP0_Config0
|= (1 << CP0C0_BE
);
30195 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
30196 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
30197 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
30198 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
30199 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
30200 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
30201 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
30202 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
30203 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
30204 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
30205 << env
->cpu_model
->CP0_LLAddr_shift
;
30206 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
30207 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
30208 env
->CCRes
= env
->cpu_model
->CCRes
;
30209 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
30210 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
30211 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
30212 env
->current_tc
= 0;
30213 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
30214 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
30215 #if defined(TARGET_MIPS64)
30216 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
30217 env
->SEGMask
|= 3ULL << 62;
30220 env
->PABITS
= env
->cpu_model
->PABITS
;
30221 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
30222 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
30223 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
30224 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
30225 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
30226 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
30227 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
30228 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
30229 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
30230 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
30231 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
30232 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
30233 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
30234 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
30235 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
30236 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
30237 env
->msair
= env
->cpu_model
->MSAIR
;
30238 env
->insn_flags
= env
->cpu_model
->insn_flags
;
30240 #if defined(CONFIG_USER_ONLY)
30241 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
30242 # ifdef TARGET_MIPS64
30243 /* Enable 64-bit register mode. */
30244 env
->CP0_Status
|= (1 << CP0St_PX
);
30246 # ifdef TARGET_ABI_MIPSN64
30247 /* Enable 64-bit address mode. */
30248 env
->CP0_Status
|= (1 << CP0St_UX
);
30250 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
30251 hardware registers. */
30252 env
->CP0_HWREna
|= 0x0000000F;
30253 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
30254 env
->CP0_Status
|= (1 << CP0St_CU1
);
30256 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
30257 env
->CP0_Status
|= (1 << CP0St_MX
);
30259 # if defined(TARGET_MIPS64)
30260 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
30261 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
30262 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
30263 env
->CP0_Status
|= (1 << CP0St_FR
);
30267 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
30268 /* If the exception was raised from a delay slot,
30269 come back to the jump. */
30270 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
30271 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
30273 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
30275 env
->active_tc
.PC
= env
->exception_base
;
30276 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
30277 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
30278 env
->CP0_Wired
= 0;
30279 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
30280 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
30281 if (mips_um_ksegs_enabled()) {
30282 env
->CP0_EBase
|= 0x40000000;
30284 env
->CP0_EBase
|= (int32_t)0x80000000;
30286 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
30287 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
30289 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
30291 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
30292 /* vectored interrupts not implemented, timer on int 7,
30293 no performance counters. */
30294 env
->CP0_IntCtl
= 0xe0000000;
30298 for (i
= 0; i
< 7; i
++) {
30299 env
->CP0_WatchLo
[i
] = 0;
30300 env
->CP0_WatchHi
[i
] = 0x80000000;
30302 env
->CP0_WatchLo
[7] = 0;
30303 env
->CP0_WatchHi
[7] = 0;
30305 /* Count register increments in debug mode, EJTAG version 1 */
30306 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
30308 cpu_mips_store_count(env
, 1);
30310 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
30313 /* Only TC0 on VPE 0 starts as active. */
30314 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
30315 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
30316 env
->tcs
[i
].CP0_TCHalt
= 1;
30318 env
->active_tc
.CP0_TCHalt
= 1;
30321 if (cs
->cpu_index
== 0) {
30322 /* VPE0 starts up enabled. */
30323 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
30324 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
30326 /* TC0 starts up unhalted. */
30328 env
->active_tc
.CP0_TCHalt
= 0;
30329 env
->tcs
[0].CP0_TCHalt
= 0;
30330 /* With thread 0 active. */
30331 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
30332 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
30337 * Configure default legacy segmentation control. We use this regardless of
30338 * whether segmentation control is presented to the guest.
30340 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
30341 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
30342 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
30343 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
30344 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
30345 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
30347 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
30348 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
30349 (3 << CP0SC_C
)) << 16;
30350 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
30351 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30352 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
30353 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
30354 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30355 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
30356 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
30357 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
30359 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
30360 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
30361 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
30362 env
->CP0_Status
|= (1 << CP0St_FR
);
30365 if (env
->insn_flags
& ISA_MIPS32R6
) {
30367 env
->CP0_PWSize
= 0x40;
30373 env
->CP0_PWField
= 0x0C30C302;
30380 env
->CP0_PWField
= 0x02;
30383 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
30384 /* microMIPS on reset when Config3.ISA is 3 */
30385 env
->hflags
|= MIPS_HFLAG_M16
;
30389 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
30393 compute_hflags(env
);
30394 restore_fp_status(env
);
30395 restore_pamask(env
);
30396 cs
->exception_index
= EXCP_NONE
;
30398 if (semihosting_get_argc()) {
30399 /* UHI interface can be used to obtain argc and argv */
30400 env
->active_tc
.gpr
[4] = -1;
30404 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
30405 target_ulong
*data
)
30407 env
->active_tc
.PC
= data
[0];
30408 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
30409 env
->hflags
|= data
[1];
30410 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
30411 case MIPS_HFLAG_BR
:
30413 case MIPS_HFLAG_BC
:
30414 case MIPS_HFLAG_BL
:
30416 env
->btarget
= data
[2];