2 * MIPS emulation for QEMU - main translation routines
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "hw/semihosting/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
41 #include "qemu/qemu-print.h"
43 #define MIPS_DEBUG_DISAS 0
45 /* MIPS major opcodes */
/*
 * Extract the 6-bit major opcode field (instruction bits 31..26).
 * The argument is parenthesized so expressions like `a | b` expand safely.
 */
#define MASK_OP_MAJOR(op)   ((op) & (0x3F << 26))
49 /* indirect opcode tables */
50 OPC_SPECIAL
= (0x00 << 26),
51 OPC_REGIMM
= (0x01 << 26),
52 OPC_CP0
= (0x10 << 26),
53 OPC_CP1
= (0x11 << 26),
54 OPC_CP2
= (0x12 << 26),
55 OPC_CP3
= (0x13 << 26),
56 OPC_SPECIAL2
= (0x1C << 26),
57 OPC_SPECIAL3
= (0x1F << 26),
58 /* arithmetic with immediate */
59 OPC_ADDI
= (0x08 << 26),
60 OPC_ADDIU
= (0x09 << 26),
61 OPC_SLTI
= (0x0A << 26),
62 OPC_SLTIU
= (0x0B << 26),
63 /* logic with immediate */
64 OPC_ANDI
= (0x0C << 26),
65 OPC_ORI
= (0x0D << 26),
66 OPC_XORI
= (0x0E << 26),
67 OPC_LUI
= (0x0F << 26),
68 /* arithmetic with immediate */
69 OPC_DADDI
= (0x18 << 26),
70 OPC_DADDIU
= (0x19 << 26),
71 /* Jump and branches */
73 OPC_JAL
= (0x03 << 26),
74 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
75 OPC_BEQL
= (0x14 << 26),
76 OPC_BNE
= (0x05 << 26),
77 OPC_BNEL
= (0x15 << 26),
78 OPC_BLEZ
= (0x06 << 26),
79 OPC_BLEZL
= (0x16 << 26),
80 OPC_BGTZ
= (0x07 << 26),
81 OPC_BGTZL
= (0x17 << 26),
82 OPC_JALX
= (0x1D << 26),
83 OPC_DAUI
= (0x1D << 26),
85 OPC_LDL
= (0x1A << 26),
86 OPC_LDR
= (0x1B << 26),
87 OPC_LB
= (0x20 << 26),
88 OPC_LH
= (0x21 << 26),
89 OPC_LWL
= (0x22 << 26),
90 OPC_LW
= (0x23 << 26),
91 OPC_LWPC
= OPC_LW
| 0x5,
92 OPC_LBU
= (0x24 << 26),
93 OPC_LHU
= (0x25 << 26),
94 OPC_LWR
= (0x26 << 26),
95 OPC_LWU
= (0x27 << 26),
96 OPC_SB
= (0x28 << 26),
97 OPC_SH
= (0x29 << 26),
98 OPC_SWL
= (0x2A << 26),
99 OPC_SW
= (0x2B << 26),
100 OPC_SDL
= (0x2C << 26),
101 OPC_SDR
= (0x2D << 26),
102 OPC_SWR
= (0x2E << 26),
103 OPC_LL
= (0x30 << 26),
104 OPC_LLD
= (0x34 << 26),
105 OPC_LD
= (0x37 << 26),
106 OPC_LDPC
= OPC_LD
| 0x5,
107 OPC_SC
= (0x38 << 26),
108 OPC_SCD
= (0x3C << 26),
109 OPC_SD
= (0x3F << 26),
110 /* Floating point load/store */
111 OPC_LWC1
= (0x31 << 26),
112 OPC_LWC2
= (0x32 << 26),
113 OPC_LDC1
= (0x35 << 26),
114 OPC_LDC2
= (0x36 << 26),
115 OPC_SWC1
= (0x39 << 26),
116 OPC_SWC2
= (0x3A << 26),
117 OPC_SDC1
= (0x3D << 26),
118 OPC_SDC2
= (0x3E << 26),
119 /* Compact Branches */
120 OPC_BLEZALC
= (0x06 << 26),
121 OPC_BGEZALC
= (0x06 << 26),
122 OPC_BGEUC
= (0x06 << 26),
123 OPC_BGTZALC
= (0x07 << 26),
124 OPC_BLTZALC
= (0x07 << 26),
125 OPC_BLTUC
= (0x07 << 26),
126 OPC_BOVC
= (0x08 << 26),
127 OPC_BEQZALC
= (0x08 << 26),
128 OPC_BEQC
= (0x08 << 26),
129 OPC_BLEZC
= (0x16 << 26),
130 OPC_BGEZC
= (0x16 << 26),
131 OPC_BGEC
= (0x16 << 26),
132 OPC_BGTZC
= (0x17 << 26),
133 OPC_BLTZC
= (0x17 << 26),
134 OPC_BLTC
= (0x17 << 26),
135 OPC_BNVC
= (0x18 << 26),
136 OPC_BNEZALC
= (0x18 << 26),
137 OPC_BNEC
= (0x18 << 26),
138 OPC_BC
= (0x32 << 26),
139 OPC_BEQZC
= (0x36 << 26),
140 OPC_JIC
= (0x36 << 26),
141 OPC_BALC
= (0x3A << 26),
142 OPC_BNEZC
= (0x3E << 26),
143 OPC_JIALC
= (0x3E << 26),
144 /* MDMX ASE specific */
145 OPC_MDMX
= (0x1E << 26),
146 /* MSA ASE, same as MDMX */
148 /* Cache and prefetch */
149 OPC_CACHE
= (0x2F << 26),
150 OPC_PREF
= (0x33 << 26),
151 /* PC-relative address computation / loads */
152 OPC_PCREL
= (0x3B << 26),
155 /* PC-relative address computation / loads */
/*
 * PC-relative sub-opcode selectors: major opcode plus bits 20..19
 * (TOP2BITS) or bits 20..16 (TOP5BITS). Arguments are parenthesized
 * to avoid precedence surprises when `op` is a compound expression.
 */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
159 /* Instructions determined by bits 19 and 20 */
160 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
161 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
162 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
164 /* Instructions determined by bits 16 ... 20 */
165 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
166 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
169 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
172 /* MIPS special opcodes */
/*
 * SPECIAL-class selector: major opcode plus the 6-bit function field
 * (bits 5..0). The whole expansion is parenthesized — without the outer
 * parentheses, `MASK_SPECIAL(op) == x` would mis-parse because `==`
 * binds tighter than `|`.
 */
#define MASK_SPECIAL(op)    (MASK_OP_MAJOR(op) | ((op) & 0x3F))
177 OPC_SLL
= 0x00 | OPC_SPECIAL
,
178 /* NOP is SLL r0, r0, 0 */
179 /* SSNOP is SLL r0, r0, 1 */
180 /* EHB is SLL r0, r0, 3 */
181 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
182 OPC_ROTR
= OPC_SRL
| (1 << 21),
183 OPC_SRA
= 0x03 | OPC_SPECIAL
,
184 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
185 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
186 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
187 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
188 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
189 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
190 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
191 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
192 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
193 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
194 OPC_DROTR
= OPC_DSRL
| (1 << 21),
195 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
196 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
197 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
198 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
199 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
200 /* Multiplication / division */
201 OPC_MULT
= 0x18 | OPC_SPECIAL
,
202 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
203 OPC_DIV
= 0x1A | OPC_SPECIAL
,
204 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
205 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
206 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
207 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
208 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
210 /* 2 registers arithmetic / logic */
211 OPC_ADD
= 0x20 | OPC_SPECIAL
,
212 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
213 OPC_SUB
= 0x22 | OPC_SPECIAL
,
214 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
215 OPC_AND
= 0x24 | OPC_SPECIAL
,
216 OPC_OR
= 0x25 | OPC_SPECIAL
,
217 OPC_XOR
= 0x26 | OPC_SPECIAL
,
218 OPC_NOR
= 0x27 | OPC_SPECIAL
,
219 OPC_SLT
= 0x2A | OPC_SPECIAL
,
220 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
221 OPC_DADD
= 0x2C | OPC_SPECIAL
,
222 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
223 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
224 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
226 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
227 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
229 OPC_TGE
= 0x30 | OPC_SPECIAL
,
230 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
231 OPC_TLT
= 0x32 | OPC_SPECIAL
,
232 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
233 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
234 OPC_TNE
= 0x36 | OPC_SPECIAL
,
235 /* HI / LO registers load & stores */
236 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
237 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
238 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
239 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
240 /* Conditional moves */
241 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
242 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
244 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
245 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
247 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
250 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
251 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
252 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
253 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
254 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
256 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
257 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
258 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
259 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
263 * R6 Multiply and Divide instructions have the same opcode
264 * and function field as legacy OPC_MULT[U]/OPC_DIV[U]
/*
 * R6 multiply/divide selector: SPECIAL encoding plus the low 11 bits
 * (sa + function fields) to distinguish MUL/MUH etc. from legacy
 * MULT/DIV. Argument parenthesized for macro hygiene.
 */
#define MASK_R6_MULDIV(op)  (MASK_SPECIAL(op) | ((op) & 0x7ff))
269 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
270 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
271 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
272 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
273 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
274 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
275 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
276 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
278 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
279 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
280 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
281 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
282 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
283 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
284 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
285 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
287 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
288 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
289 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
290 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
291 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
293 OPC_LSA
= 0x05 | OPC_SPECIAL
,
294 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
297 /* Multiplication variants of the vr54xx. */
/*
 * VR54xx multiply-variant selector: SPECIAL encoding plus bits 10..6.
 * Outer parentheses prevent mis-parsing in comparison/switch contexts;
 * the argument is parenthesized as well.
 */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
301 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
302 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
303 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
304 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
305 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
306 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
307 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
308 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
309 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
310 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
311 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
312 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
313 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
314 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
317 /* REGIMM (rt field) opcodes */
/*
 * REGIMM-class selector: major opcode plus the rt field (bits 20..16),
 * which encodes the sub-opcode. Fully parenthesized — the unwrapped
 * form breaks when the macro result meets a higher-precedence operator.
 */
#define MASK_REGIMM(op)     (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
321 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
322 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
323 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
324 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
325 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
326 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
327 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
328 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
329 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
330 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
331 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
332 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
333 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
334 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
335 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
336 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
338 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
339 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
342 /* Special2 opcodes */
/*
 * SPECIAL2-class selector: major opcode plus the 6-bit function field.
 * Expansion and argument are both parenthesized (macro hygiene).
 */
#define MASK_SPECIAL2(op)   (MASK_OP_MAJOR(op) | ((op) & 0x3F))
346 /* Multiply & xxx operations */
347 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
348 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
349 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
350 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
351 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
353 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
354 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
355 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
356 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
357 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
358 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
359 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
360 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
361 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
362 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
363 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
364 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
366 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
367 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
368 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
369 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
371 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
374 /* Special3 opcodes */
/*
 * SPECIAL3-class selector: major opcode plus the 6-bit function field.
 * Expansion and argument are both parenthesized (macro hygiene).
 */
#define MASK_SPECIAL3(op)   (MASK_OP_MAJOR(op) | ((op) & 0x3F))
378 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
379 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
380 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
381 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
382 OPC_INS
= 0x04 | OPC_SPECIAL3
,
383 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
384 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
385 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
386 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
387 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
388 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
389 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
390 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
393 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
394 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
395 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
396 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
397 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
398 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
399 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
400 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
401 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
402 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
403 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
404 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
407 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
408 /* MIPS DSP Arithmetic */
409 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
410 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
411 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
412 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
413 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
414 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
415 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
416 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
417 /* MIPS DSP GPR-Based Shift Sub-class */
418 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
419 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
420 /* MIPS DSP Multiply Sub-class insns */
421 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
422 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
423 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
424 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
425 /* DSP Bit/Manipulation Sub-class */
426 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
427 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
428 /* MIPS DSP Append Sub-class */
429 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
430 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
431 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
432 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
433 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
436 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
437 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
438 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
439 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
440 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
441 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
442 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
443 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
444 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
445 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
446 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
447 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
448 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
449 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
450 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
451 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
454 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
455 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
456 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
457 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
458 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
459 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/*
 * BSHFL sub-opcode selector: SPECIAL3 encoding plus the sa field
 * (bits 10..6). Fully parenthesized so the expansion survives
 * comparison and switch contexts unchanged.
 */
#define MASK_BSHFL(op)      (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
466 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
467 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
468 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
469 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
470 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
471 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
472 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
473 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/*
 * DBSHFL sub-opcode selector: SPECIAL3 encoding plus the sa field
 * (bits 10..6). Fully parenthesized so the expansion survives
 * comparison and switch contexts unchanged.
 */
#define MASK_DBSHFL(op)     (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
480 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
481 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
482 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
483 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
484 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
485 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
486 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
487 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
488 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
489 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
490 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
493 /* MIPS DSP REGIMM opcodes */
495 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
496 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
499 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
502 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
503 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
504 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
505 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
508 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
510 /* MIPS DSP Arithmetic Sub-class */
511 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
512 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
513 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
514 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
515 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
516 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
517 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
518 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
519 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
520 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
521 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
522 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
523 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
524 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
525 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
526 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
527 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
528 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
529 /* MIPS DSP Multiply Sub-class insns */
530 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
531 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
532 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
533 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
534 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
535 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
538 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
539 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
541 /* MIPS DSP Arithmetic Sub-class */
542 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
543 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
544 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
545 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
546 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
547 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
548 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
549 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
550 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
551 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
552 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
553 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
554 /* MIPS DSP Multiply Sub-class insns */
555 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
556 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
557 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
558 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
561 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
563 /* MIPS DSP Arithmetic Sub-class */
564 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
565 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
566 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
567 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
568 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
569 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
570 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
571 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
572 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
573 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
574 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
575 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
576 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
577 /* DSP Bit/Manipulation Sub-class */
578 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
579 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
580 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
581 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
582 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
585 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
587 /* MIPS DSP Arithmetic Sub-class */
588 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
589 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
590 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
591 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
592 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
593 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
594 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
595 /* DSP Compare-Pick Sub-class */
596 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
597 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
598 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
599 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
600 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
601 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
602 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
603 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
604 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
605 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
606 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
607 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
608 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
609 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
610 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
613 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
615 /* MIPS DSP GPR-Based Shift Sub-class */
616 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
617 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
618 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
619 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
620 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
621 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
622 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
623 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
624 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
625 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
626 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
627 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
628 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
629 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
630 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
631 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
632 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
633 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
634 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
635 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
636 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
637 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
640 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
642 /* MIPS DSP Multiply Sub-class insns */
643 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
644 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
645 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
646 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
647 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
648 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
649 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
650 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
651 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
652 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
653 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
654 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
655 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
656 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
657 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
658 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
659 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
660 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
661 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
662 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
663 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
664 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
667 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
669 /* DSP Bit/Manipulation Sub-class */
670 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
673 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
675 /* MIPS DSP Append Sub-class */
676 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
677 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
678 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
681 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
683 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
684 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
685 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
686 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
687 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
688 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
689 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
690 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
691 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
692 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
693 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
694 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
695 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
696 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
697 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
698 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
699 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
700 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
703 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
705 /* MIPS DSP Arithmetic Sub-class */
706 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
707 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
708 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
709 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
710 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
711 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
712 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
713 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
714 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
715 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
716 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
717 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
718 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
719 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
720 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
721 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
722 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
723 /* DSP Bit/Manipulation Sub-class */
724 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
725 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
726 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
727 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
728 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
729 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
732 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
734 /* MIPS DSP Multiply Sub-class insns */
735 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
736 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
737 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
738 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
739 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
740 /* MIPS DSP Arithmetic Sub-class */
741 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
742 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
743 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
744 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
745 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
746 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
747 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
748 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
749 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
750 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
751 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
752 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
753 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
754 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
755 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
756 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
757 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
758 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
759 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
760 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
761 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
764 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
766 /* DSP Compare-Pick Sub-class */
767 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
768 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
769 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
770 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
771 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
772 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
773 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
774 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
775 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
776 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
777 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
778 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
779 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
780 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
781 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
782 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
783 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
784 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
785 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
786 /* MIPS DSP Arithmetic Sub-class */
787 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
788 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
789 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
790 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
792 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
793 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
794 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
797 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
799 /* DSP Append Sub-class */
800 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
801 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
802 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
803 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
806 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
808 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
809 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
810 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
811 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
812 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
813 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
814 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
815 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
816 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
817 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
818 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
819 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
820 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
821 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
822 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
823 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
824 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
825 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
826 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
827 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
828 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
829 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
832 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
834 /* DSP Bit/Manipulation Sub-class */
835 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
838 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
840 /* MIPS DSP Multiply Sub-class insns */
841 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
842 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
843 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
844 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
845 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
846 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
847 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
848 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
849 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
850 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
851 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
852 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
853 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
854 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
855 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
856 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
857 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
858 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
859 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
860 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
861 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
862 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
863 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
864 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
865 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
866 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
/* SHLL.OB sub-opcode: SPECIAL3 function field plus the sa field (bits 10..6). */
869 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
871 /* MIPS DSP GPR-Based Shift Sub-class */
872 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
873 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
874 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
875 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
876 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
877 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
878 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
879 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
880 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
881 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
882 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
883 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
884 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
885 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
886 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
887 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
888 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
889 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
890 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
891 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
892 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
893 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
894 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
895 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
896 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
897 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
900 /* Coprocessor 0 (rs field) */
/*
 * Coprocessor 0 sub-opcode: major opcode plus the rs field (bits 25..21).
 * Expansion is fully parenthesized (CERT PRE02-C): a bare `a | b` binds
 * more loosely than comparison operators, so an unparenthesized form is
 * unsafe inside larger expressions such as `MASK_CP0(op) == OPC_MFC0`.
 */
#define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
904 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
905 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
906 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
907 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
908 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
909 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
910 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
911 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
912 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
913 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
914 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
915 OPC_C0
= (0x10 << 21) | OPC_CP0
,
916 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
917 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
918 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
919 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
920 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
921 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
922 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
923 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
924 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
925 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
926 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
927 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
928 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
929 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
930 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
/*
 * MFMC0 sub-opcode: CP0 sub-opcode plus the low 16 bits of the instruction.
 * Fully parenthesized so the macro is safe in any expression context
 * (matches the style of the other MASK_* helpers).
 */
#define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
937 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
938 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
939 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
940 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
941 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
942 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
943 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
944 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
947 /* Coprocessor 0 (with rs == C0) */
/*
 * Coprocessor 0 (rs == C0) sub-opcode: CP0 sub-opcode plus the function
 * field (bits 5..0). Fully parenthesized for safe use in expressions.
 */
#define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
951 OPC_TLBR
= 0x01 | OPC_C0
,
952 OPC_TLBWI
= 0x02 | OPC_C0
,
953 OPC_TLBINV
= 0x03 | OPC_C0
,
954 OPC_TLBINVF
= 0x04 | OPC_C0
,
955 OPC_TLBWR
= 0x06 | OPC_C0
,
956 OPC_TLBP
= 0x08 | OPC_C0
,
957 OPC_RFE
= 0x10 | OPC_C0
,
958 OPC_ERET
= 0x18 | OPC_C0
,
959 OPC_DERET
= 0x1F | OPC_C0
,
960 OPC_WAIT
= 0x20 | OPC_C0
,
963 /* Coprocessor 1 (rs field) */
/*
 * Coprocessor 1 sub-opcode: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized (CERT PRE02-C) so `MASK_CP1(op) == x` parses as
 * intended in any expression context.
 */
#define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
966 /* Values for the fmt field in FP instructions */
968 /* 0 - 15 are reserved */
969 FMT_S
= 16, /* single fp */
970 FMT_D
= 17, /* double fp */
971 FMT_E
= 18, /* extended fp */
972 FMT_Q
= 19, /* quad fp */
973 FMT_W
= 20, /* 32-bit fixed */
974 FMT_L
= 21, /* 64-bit fixed */
975 FMT_PS
= 22, /* paired single fp */
976 /* 23 - 31 are reserved */
980 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
981 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
982 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
983 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
984 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
985 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
986 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
987 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
988 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
989 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
990 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
991 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
992 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
993 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
994 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
995 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
996 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
997 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
998 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
999 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
1000 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
1001 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
1002 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
1003 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
1004 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
1005 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
1006 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
1007 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
1008 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
1009 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
/*
 * CP1 function sub-opcode: CP1 sub-opcode plus the function field
 * (bits 5..0). Fully parenthesized for safe use in expressions.
 */
#define MASK_CP1_FUNC(op) (MASK_CP1(op) | (op & 0x3F))
/*
 * BC1 sub-opcode: CP1 sub-opcode plus the nd/tf bits (17..16).
 * Fully parenthesized for safe use in expressions.
 */
#define MASK_BC1(op) (MASK_CP1(op) | (op & (0x3 << 16)))
1016 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
1017 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
1018 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
1019 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
1023 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
1024 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
1028 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
1029 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
/*
 * Coprocessor 2 sub-opcode: major opcode plus the rs field (bits 25..21).
 * Fully parenthesized (CERT PRE02-C) so the macro composes safely with
 * comparison and other low-precedence operators.
 */
#define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
1035 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
1036 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
1037 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1038 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1039 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1040 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1041 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1042 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1043 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1044 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1045 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
/* Loongson multimedia sub-opcode: major opcode, rs field, and function bits 4..0. */
1048 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1051 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1052 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1053 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1054 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1055 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1056 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1057 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1058 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1060 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1061 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1062 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1063 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1064 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1065 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1066 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1067 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1069 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1070 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1071 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1072 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1073 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1074 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1075 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1076 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1078 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1079 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1080 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1081 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1082 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1083 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1084 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1085 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1087 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1088 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1089 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1090 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1091 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1092 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1094 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1095 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1096 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1097 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1098 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1099 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1101 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1102 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1103 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1104 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1105 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1106 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1108 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1109 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1110 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1111 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1112 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1113 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1115 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1116 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1117 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1118 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1119 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1120 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1122 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1123 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1124 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1125 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1126 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1127 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1129 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1130 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1131 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1132 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1133 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1134 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1136 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1137 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1138 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1139 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1140 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1141 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
/*
 * Coprocessor 3 sub-opcode: major opcode plus the function field
 * (bits 5..0). Fully parenthesized for safe use in expressions.
 */
#define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1148 OPC_LWXC1
= 0x00 | OPC_CP3
,
1149 OPC_LDXC1
= 0x01 | OPC_CP3
,
1150 OPC_LUXC1
= 0x05 | OPC_CP3
,
1151 OPC_SWXC1
= 0x08 | OPC_CP3
,
1152 OPC_SDXC1
= 0x09 | OPC_CP3
,
1153 OPC_SUXC1
= 0x0D | OPC_CP3
,
1154 OPC_PREFX
= 0x0F | OPC_CP3
,
1155 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1156 OPC_MADD_S
= 0x20 | OPC_CP3
,
1157 OPC_MADD_D
= 0x21 | OPC_CP3
,
1158 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1159 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1160 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1161 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1162 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1163 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1164 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1165 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1166 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1167 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/* MSA minor sub-opcode: major opcode plus the minor-opcode field (bits 5..0). */
1171 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1173 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1174 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1175 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1176 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1177 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1178 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1179 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1180 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1181 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1182 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1183 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1184 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1185 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1186 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1187 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1188 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1189 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1190 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1191 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1192 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1193 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1195 /* MI10 instruction */
1196 OPC_LD_B
= (0x20) | OPC_MSA
,
1197 OPC_LD_H
= (0x21) | OPC_MSA
,
1198 OPC_LD_W
= (0x22) | OPC_MSA
,
1199 OPC_LD_D
= (0x23) | OPC_MSA
,
1200 OPC_ST_B
= (0x24) | OPC_MSA
,
1201 OPC_ST_H
= (0x25) | OPC_MSA
,
1202 OPC_ST_W
= (0x26) | OPC_MSA
,
1203 OPC_ST_D
= (0x27) | OPC_MSA
,
1207 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1208 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1209 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1210 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1211 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1212 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1213 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1214 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1215 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1216 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1217 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1218 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1219 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1221 /* I8 instruction */
1222 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1223 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1224 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1225 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1226 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1227 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1228 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1229 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1230 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1231 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1233 /* VEC/2R/2RF instruction */
1234 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1235 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1236 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1237 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1238 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1239 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1240 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1242 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1243 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1245 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1246 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1247 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1248 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1249 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1251 /* 2RF instruction df(bit 16) = _w, _d */
1252 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1253 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1254 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1255 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1256 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1257 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1258 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1259 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1260 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1261 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1262 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1263 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1264 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1265 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1266 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1267 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1269 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1270 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1271 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1272 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1273 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1274 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1275 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1276 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1277 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1278 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1279 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1280 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1281 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1282 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1283 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1284 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1285 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1286 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1287 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1288 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1289 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1290 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1291 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1292 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1293 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1294 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1295 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1296 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1297 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1298 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1299 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1300 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1301 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1302 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1303 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1304 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1305 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1306 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1307 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1308 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1309 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1310 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1311 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1312 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1313 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1314 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1315 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1316 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1317 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1318 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1319 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1320 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1321 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1322 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1323 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1324 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1325 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1326 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1327 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1328 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1329 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1330 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1331 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1332 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1334 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1335 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1336 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1337 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1338 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1339 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1340 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1341 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1342 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1343 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1345 /* 3RF instruction _df(bit 21) = _w, _d */
1346 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1347 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1348 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1349 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1350 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1351 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1352 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1353 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1354 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1355 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1356 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1357 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1358 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1359 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1360 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1361 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1362 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1363 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1364 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1365 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1366 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1367 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1368 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1369 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1370 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1371 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1372 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1373 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1374 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1375 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1376 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1377 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1378 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1379 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1380 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1381 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1382 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1383 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1384 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1385 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1386 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1388 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1389 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1390 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1391 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1392 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1393 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1394 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1395 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1396 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1397 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1398 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1399 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1400 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1406 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1407 * ============================================
1410 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
1411 * instructions set. It is designed to fit the needs of signal, graphical and
1412 * video processing applications. MXU instruction set is used in Xburst family
1413 * of microprocessors by Ingenic.
1415 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1416 * the control register.
1419 * The notation used in MXU assembler mnemonics
1420 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1422 * Register operands:
1424 * XRa, XRb, XRc, XRd - MXU registers
1425 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1427 * Non-register operands:
1429 * aptn1 - 1-bit accumulate add/subtract pattern
1430 * aptn2 - 2-bit accumulate add/subtract pattern
1431 * eptn2 - 2-bit execute add/subtract pattern
1432 * optn2 - 2-bit operand pattern
1433 * optn3 - 3-bit operand pattern
1434 * sft4 - 4-bit shift amount
1435 * strd2 - 2-bit stride amount
1439 * Level of parallelism: Operand size:
1440 * S - single operation at a time 32 - word
1441 * D - two operations in parallel 16 - half word
1442 * Q - four operations in parallel 8 - byte
1446 * ADD - Add or subtract
1447 * ADDC - Add with carry-in
1449 * ASUM - Sum together then accumulate (add or subtract)
1450 * ASUMC - Sum together then accumulate (add or subtract) with carry-in
1451 * AVG - Average between 2 operands
1452 * ABD - Absolute difference
1454 * AND - Logical bitwise 'and' operation
1456 * EXTR - Extract bits
1457 * I2M - Move from GPR register to MXU register
1458 * LDD - Load data from memory to XRF
1459 * LDI - Load data from memory to XRF (and increase the address base)
1460 * LUI - Load unsigned immediate
1462 * MULU - Unsigned multiply
1463 * MADD - 64-bit operand add 32x32 product
1464 * MSUB - 64-bit operand subtract 32x32 product
1465 * MAC - Multiply and accumulate (add or subtract)
1466 * MAD - Multiply and add or subtract
1467 * MAX - Maximum between 2 operands
1468 * MIN - Minimum between 2 operands
1469 * M2I - Move from MXU register to GPR register
1470 * MOVZ - Move if zero
1471 * MOVN - Move if non-zero
1472 * NOR - Logical bitwise 'nor' operation
1473 * OR - Logical bitwise 'or' operation
1474 * STD - Store data from XRF to memory
1475 * SDI - Store data from XRF to memory (and increase the address base)
1476 * SLT - Set of less than comparison
1477 * SAD - Sum of absolute differences
1478 * SLL - Logical shift left
1479 * SLR - Logical shift right
1480 * SAR - Arithmetic shift right
1483 * SCOP - Calculate x's scope (-1, means x<0; 0, means x==0; 1, means x>0)
1484 * XOR - Logical bitwise 'exclusive or' operation
1488 * E - Expand results
1489 * F - Fixed point multiplication
1490 * L - Low part result
1491 * R - Doing rounding
1492 * V - Variable instead of immediate
1493 * W - Combine above L and V
1496 * The list of MXU instructions grouped by functionality
1497 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1499 * Load/Store instructions Multiplication instructions
1500 * ----------------------- ---------------------------
1502 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1503 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1504 * S32LDDV XRa, Rb, rc, strd2 S32MSUB XRa, XRd, Rs, Rt
1505 * S32STDV XRa, Rb, rc, strd2 S32MSUBU XRa, XRd, Rs, Rt
1506 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1507 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1508 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1509 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1510 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1511 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1512 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1513 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1514 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1515 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1516 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1517 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1518 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1519 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1520 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1521 * S16SDI XRa, Rb, s10, eptn2
1522 * S8LDD XRa, Rb, s8, eptn3
1523 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1524 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1525 * S8SDI XRa, Rb, s8, eptn3
1526 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1527 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1528 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1529 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1530 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1531 * S32CPS XRa, XRb, XRc
1532 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1533 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1534 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1535 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1536 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1537 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1538 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1539 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1540 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1541 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1542 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1543 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1544 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1545 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1546 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1547 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1548 * Q8SLT XRa, XRb, XRc
1549 * Q8SLTU XRa, XRb, XRc
1550 * Q8MOVZ XRa, XRb, XRc Shift instructions
1551 * Q8MOVN XRa, XRb, XRc ------------------
1553 * D32SLL XRa, XRb, XRc, XRd, sft4
1554 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1555 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1556 * D32SARL XRa, XRb, XRc, sft4
1557 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1558 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1559 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1560 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1561 * Q16SLL XRa, XRb, XRc, XRd, sft4
1562 * Q16SLR XRa, XRb, XRc, XRd, sft4
1563 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1564 * ------------------------- Q16SLLV XRa, XRb, Rb
1565 * Q16SLRV XRa, XRb, Rb
1566 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1567 * S32ALN XRa, XRb, XRc, Rb
1568 * S32ALNI XRa, XRb, XRc, s3
1569 * S32LUI XRa, s8, optn3 Move instructions
1570 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1571 * S32EXTRV XRa, XRb, Rs, Rt
1572 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1573 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1576 * The opcode organization of MXU instructions
1577 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1579 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1580 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1581 * other bits up to the instruction level is as follows:
1586 * ┌─ 000000 ─ OPC_MXU_S32MADD
1587 * ├─ 000001 ─ OPC_MXU_S32MADDU
1588 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1591 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1592 * │ ├─ 001 ─ OPC_MXU_S32MIN
1593 * │ ├─ 010 ─ OPC_MXU_D16MAX
1594 * │ ├─ 011 ─ OPC_MXU_D16MIN
1595 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1596 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1597 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1598 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1599 * ├─ 000100 ─ OPC_MXU_S32MSUB
1600 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1601 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1602 * │ ├─ 001 ─ OPC_MXU_D16SLT
1603 * │ ├─ 010 ─ OPC_MXU_D16AVG
1604 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1605 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1606 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1607 * │ └─ 111 ─ OPC_MXU_Q8ADD
1610 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1611 * │ ├─ 010 ─ OPC_MXU_D16CPS
1612 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1613 * │ └─ 110 ─ OPC_MXU_Q16SAT
1614 * ├─ 001000 ─ OPC_MXU_D16MUL
1616 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1617 * │ └─ 01 ─ OPC_MXU_D16MULE
1618 * ├─ 001010 ─ OPC_MXU_D16MAC
1619 * ├─ 001011 ─ OPC_MXU_D16MACF
1620 * ├─ 001100 ─ OPC_MXU_D16MADL
1621 * ├─ 001101 ─ OPC_MXU_S16MAD
1622 * ├─ 001110 ─ OPC_MXU_Q16ADD
1623 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1624 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1625 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1628 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1629 * │ └─ 1 ─ OPC_MXU_S32STDR
1632 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1633 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1636 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1637 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1640 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1641 * │ └─ 1 ─ OPC_MXU_S32LDIR
1644 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1645 * │ └─ 1 ─ OPC_MXU_S32SDIR
1648 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1649 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1652 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1653 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1654 * ├─ 011000 ─ OPC_MXU_D32ADD
1656 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1657 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1658 * │ └─ 10 ─ OPC_MXU_D32ASUM
1659 * ├─ 011010 ─ <not assigned>
1661 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1662 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1663 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1666 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1667 * │ ├─ 01 ─ OPC_MXU_D8SUM
1668 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1669 * ├─ 011110 ─ <not assigned>
1670 * ├─ 011111 ─ <not assigned>
1671 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1672 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1673 * ├─ 100010 ─ OPC_MXU_S8LDD
1674 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1675 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
 1676 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 01 ─ OPC_MXU_S32MULU
 1677 * │ ├─ 10 ─ OPC_MXU_S32EXTR
 1678 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 11 ─ OPC_MXU_S32EXTRV
1681 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1682 * │ ├─ 001 ─ OPC_MXU_S32ALN
1683 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1684 * │ ├─ 011 ─ OPC_MXU_S32LUI
1685 * │ ├─ 100 ─ OPC_MXU_S32NOR
1686 * │ ├─ 101 ─ OPC_MXU_S32AND
1687 * │ ├─ 110 ─ OPC_MXU_S32OR
1688 * │ └─ 111 ─ OPC_MXU_S32XOR
1691 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1692 * │ ├─ 001 ─ OPC_MXU_LXH
1693 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1694 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1695 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1696 * ├─ 101100 ─ OPC_MXU_S16LDI
1697 * ├─ 101101 ─ OPC_MXU_S16SDI
1698 * ├─ 101110 ─ OPC_MXU_S32M2I
1699 * ├─ 101111 ─ OPC_MXU_S32I2M
1700 * ├─ 110000 ─ OPC_MXU_D32SLL
1701 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1702 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1703 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1704 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 010 ─ OPC_MXU_D32SARV
1705 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 011 ─ OPC_MXU_Q16SLLV
1706 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1707 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 101 ─ OPC_MXU_Q16SARV
1709 * ├─ 110111 ─ OPC_MXU_Q16SAR
1711 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1712 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1715 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1716 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1717 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1718 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1719 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1720 * │ └─ 101 ─ OPC_MXU_S32MOVN
1723 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1724 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1725 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1726 * ├─ 111100 ─ OPC_MXU_Q8MADL
1727 * ├─ 111101 ─ OPC_MXU_S32SFL
1728 * ├─ 111110 ─ OPC_MXU_Q8SAD
1729 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1734 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1735 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
1739 OPC_MXU_S32MADD
= 0x00,
1740 OPC_MXU_S32MADDU
= 0x01,
1741 OPC__MXU_MUL
= 0x02,
1742 OPC_MXU__POOL00
= 0x03,
1743 OPC_MXU_S32MSUB
= 0x04,
1744 OPC_MXU_S32MSUBU
= 0x05,
1745 OPC_MXU__POOL01
= 0x06,
1746 OPC_MXU__POOL02
= 0x07,
1747 OPC_MXU_D16MUL
= 0x08,
1748 OPC_MXU__POOL03
= 0x09,
1749 OPC_MXU_D16MAC
= 0x0A,
1750 OPC_MXU_D16MACF
= 0x0B,
1751 OPC_MXU_D16MADL
= 0x0C,
1752 OPC_MXU_S16MAD
= 0x0D,
1753 OPC_MXU_Q16ADD
= 0x0E,
1754 OPC_MXU_D16MACE
= 0x0F,
1755 OPC_MXU__POOL04
= 0x10,
1756 OPC_MXU__POOL05
= 0x11,
1757 OPC_MXU__POOL06
= 0x12,
1758 OPC_MXU__POOL07
= 0x13,
1759 OPC_MXU__POOL08
= 0x14,
1760 OPC_MXU__POOL09
= 0x15,
1761 OPC_MXU__POOL10
= 0x16,
1762 OPC_MXU__POOL11
= 0x17,
1763 OPC_MXU_D32ADD
= 0x18,
1764 OPC_MXU__POOL12
= 0x19,
1765 /* not assigned 0x1A */
1766 OPC_MXU__POOL13
= 0x1B,
1767 OPC_MXU__POOL14
= 0x1C,
1768 OPC_MXU_Q8ACCE
= 0x1D,
1769 /* not assigned 0x1E */
1770 /* not assigned 0x1F */
1771 /* not assigned 0x20 */
1772 /* not assigned 0x21 */
1773 OPC_MXU_S8LDD
= 0x22,
1774 OPC_MXU_S8STD
= 0x23,
1775 OPC_MXU_S8LDI
= 0x24,
1776 OPC_MXU_S8SDI
= 0x25,
1777 OPC_MXU__POOL15
= 0x26,
1778 OPC_MXU__POOL16
= 0x27,
1779 OPC_MXU__POOL17
= 0x28,
1780 /* not assigned 0x29 */
1781 OPC_MXU_S16LDD
= 0x2A,
1782 OPC_MXU_S16STD
= 0x2B,
1783 OPC_MXU_S16LDI
= 0x2C,
1784 OPC_MXU_S16SDI
= 0x2D,
1785 OPC_MXU_S32M2I
= 0x2E,
1786 OPC_MXU_S32I2M
= 0x2F,
1787 OPC_MXU_D32SLL
= 0x30,
1788 OPC_MXU_D32SLR
= 0x31,
1789 OPC_MXU_D32SARL
= 0x32,
1790 OPC_MXU_D32SAR
= 0x33,
1791 OPC_MXU_Q16SLL
= 0x34,
1792 OPC_MXU_Q16SLR
= 0x35,
1793 OPC_MXU__POOL18
= 0x36,
1794 OPC_MXU_Q16SAR
= 0x37,
1795 OPC_MXU__POOL19
= 0x38,
1796 OPC_MXU__POOL20
= 0x39,
1797 OPC_MXU__POOL21
= 0x3A,
1798 OPC_MXU_Q16SCOP
= 0x3B,
1799 OPC_MXU_Q8MADL
= 0x3C,
1800 OPC_MXU_S32SFL
= 0x3D,
1801 OPC_MXU_Q8SAD
= 0x3E,
1802 /* not assigned 0x3F */
1810 OPC_MXU_S32MAX
= 0x00,
1811 OPC_MXU_S32MIN
= 0x01,
1812 OPC_MXU_D16MAX
= 0x02,
1813 OPC_MXU_D16MIN
= 0x03,
1814 OPC_MXU_Q8MAX
= 0x04,
1815 OPC_MXU_Q8MIN
= 0x05,
1816 OPC_MXU_Q8SLT
= 0x06,
1817 OPC_MXU_Q8SLTU
= 0x07,
1824 OPC_MXU_S32SLT
= 0x00,
1825 OPC_MXU_D16SLT
= 0x01,
1826 OPC_MXU_D16AVG
= 0x02,
1827 OPC_MXU_D16AVGR
= 0x03,
1828 OPC_MXU_Q8AVG
= 0x04,
1829 OPC_MXU_Q8AVGR
= 0x05,
1830 OPC_MXU_Q8ADD
= 0x07,
1837 OPC_MXU_S32CPS
= 0x00,
1838 OPC_MXU_D16CPS
= 0x02,
1839 OPC_MXU_Q8ABD
= 0x04,
1840 OPC_MXU_Q16SAT
= 0x06,
1847 OPC_MXU_D16MULF
= 0x00,
1848 OPC_MXU_D16MULE
= 0x01,
1855 OPC_MXU_S32LDD
= 0x00,
1856 OPC_MXU_S32LDDR
= 0x01,
1863 OPC_MXU_S32STD
= 0x00,
1864 OPC_MXU_S32STDR
= 0x01,
1871 OPC_MXU_S32LDDV
= 0x00,
1872 OPC_MXU_S32LDDVR
= 0x01,
1879 OPC_MXU_S32STDV
= 0x00,
1880 OPC_MXU_S32STDVR
= 0x01,
1887 OPC_MXU_S32LDI
= 0x00,
1888 OPC_MXU_S32LDIR
= 0x01,
1895 OPC_MXU_S32SDI
= 0x00,
1896 OPC_MXU_S32SDIR
= 0x01,
1903 OPC_MXU_S32LDIV
= 0x00,
1904 OPC_MXU_S32LDIVR
= 0x01,
1911 OPC_MXU_S32SDIV
= 0x00,
1912 OPC_MXU_S32SDIVR
= 0x01,
1919 OPC_MXU_D32ACC
= 0x00,
1920 OPC_MXU_D32ACCM
= 0x01,
1921 OPC_MXU_D32ASUM
= 0x02,
1928 OPC_MXU_Q16ACC
= 0x00,
1929 OPC_MXU_Q16ACCM
= 0x01,
1930 OPC_MXU_Q16ASUM
= 0x02,
1937 OPC_MXU_Q8ADDE
= 0x00,
1938 OPC_MXU_D8SUM
= 0x01,
1939 OPC_MXU_D8SUMC
= 0x02,
1946 OPC_MXU_S32MUL
= 0x00,
1947 OPC_MXU_S32MULU
= 0x01,
1948 OPC_MXU_S32EXTR
= 0x02,
1949 OPC_MXU_S32EXTRV
= 0x03,
1956 OPC_MXU_D32SARW
= 0x00,
1957 OPC_MXU_S32ALN
= 0x01,
1958 OPC_MXU_S32ALNI
= 0x02,
1959 OPC_MXU_S32LUI
= 0x03,
1960 OPC_MXU_S32NOR
= 0x04,
1961 OPC_MXU_S32AND
= 0x05,
1962 OPC_MXU_S32OR
= 0x06,
1963 OPC_MXU_S32XOR
= 0x07,
1973 OPC_MXU_LXBU
= 0x04,
1974 OPC_MXU_LXHU
= 0x05,
1981 OPC_MXU_D32SLLV
= 0x00,
1982 OPC_MXU_D32SLRV
= 0x01,
1983 OPC_MXU_D32SARV
= 0x03,
1984 OPC_MXU_Q16SLLV
= 0x04,
1985 OPC_MXU_Q16SLRV
= 0x05,
1986 OPC_MXU_Q16SARV
= 0x07,
1993 OPC_MXU_Q8MUL
= 0x00,
1994 OPC_MXU_Q8MULSU
= 0x01,
2001 OPC_MXU_Q8MOVZ
= 0x00,
2002 OPC_MXU_Q8MOVN
= 0x01,
2003 OPC_MXU_D16MOVZ
= 0x02,
2004 OPC_MXU_D16MOVN
= 0x03,
2005 OPC_MXU_S32MOVZ
= 0x04,
2006 OPC_MXU_S32MOVN
= 0x05,
2013 OPC_MXU_Q8MAC
= 0x00,
2014 OPC_MXU_Q8MACSU
= 0x01,
2018 * Overview of the TX79-specific instruction set
2019 * =============================================
2021 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
2022 * are only used by the specific quadword (128-bit) LQ/SQ load/store
2023 * instructions and certain multimedia instructions (MMIs). These MMIs
2024 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
2025 * or sixteen 8-bit paths.
2029 * The Toshiba TX System RISC TX79 Core Architecture manual,
2030 * https://wiki.qemu.org/File:C790.pdf
2032 * Three-Operand Multiply and Multiply-Add (4 instructions)
2033 * --------------------------------------------------------
2034 * MADD [rd,] rs, rt Multiply/Add
2035 * MADDU [rd,] rs, rt Multiply/Add Unsigned
2036 * MULT [rd,] rs, rt Multiply (3-operand)
2037 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
2039 * Multiply Instructions for Pipeline 1 (10 instructions)
2040 * ------------------------------------------------------
2041 * MULT1 [rd,] rs, rt Multiply Pipeline 1
2042 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
2043 * DIV1 rs, rt Divide Pipeline 1
2044 * DIVU1 rs, rt Divide Unsigned Pipeline 1
2045 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
2046 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
2047 * MFHI1 rd Move From HI1 Register
2048 * MFLO1 rd Move From LO1 Register
2049 * MTHI1 rs Move To HI1 Register
2050 * MTLO1 rs Move To LO1 Register
2052 * Arithmetic (19 instructions)
2053 * ----------------------------
2054 * PADDB rd, rs, rt Parallel Add Byte
2055 * PSUBB rd, rs, rt Parallel Subtract Byte
2056 * PADDH rd, rs, rt Parallel Add Halfword
2057 * PSUBH rd, rs, rt Parallel Subtract Halfword
2058 * PADDW rd, rs, rt Parallel Add Word
2059 * PSUBW rd, rs, rt Parallel Subtract Word
2060 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
2061 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
2062 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
2063 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
2064 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
2065 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
2066 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
2067 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
2068 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
2069 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
2070 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
2071 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
2072 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
2074 * Min/Max (4 instructions)
2075 * ------------------------
2076 * PMAXH rd, rs, rt Parallel Maximum Halfword
2077 * PMINH rd, rs, rt Parallel Minimum Halfword
2078 * PMAXW rd, rs, rt Parallel Maximum Word
2079 * PMINW rd, rs, rt Parallel Minimum Word
2081 * Absolute (2 instructions)
2082 * -------------------------
2083 * PABSH rd, rt Parallel Absolute Halfword
2084 * PABSW rd, rt Parallel Absolute Word
2086 * Logical (4 instructions)
2087 * ------------------------
2088 * PAND rd, rs, rt Parallel AND
2089 * POR rd, rs, rt Parallel OR
2090 * PXOR rd, rs, rt Parallel XOR
2091 * PNOR rd, rs, rt Parallel NOR
2093 * Shift (9 instructions)
2094 * ----------------------
2095 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
2096 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
2097 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
2098 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
2099 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
2100 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
2101 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
2102 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
2103 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
2105 * Compare (6 instructions)
2106 * ------------------------
2107 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
2108 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
2109 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
2110 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
2111 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
2112 * PCEQW rd, rs, rt Parallel Compare for Equal Word
2114 * LZC (1 instruction)
2115 * -------------------
2116 * PLZCW rd, rs Parallel Leading Zero or One Count Word
2118 * Quadword Load and Store (2 instructions)
2119 * ----------------------------------------
2120 * LQ rt, offset(base) Load Quadword
2121 * SQ rt, offset(base) Store Quadword
2123 * Multiply and Divide (19 instructions)
2124 * -------------------------------------
2125 * PMULTW rd, rs, rt Parallel Multiply Word
2126 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
2127 * PDIVW rs, rt Parallel Divide Word
2128 * PDIVUW rs, rt Parallel Divide Unsigned Word
2129 * PMADDW rd, rs, rt Parallel Multiply-Add Word
2130 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
2131 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
2132 * PMULTH rd, rs, rt Parallel Multiply Halfword
2133 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
2134 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
2135 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
2136 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
2137 * PDIVBW rs, rt Parallel Divide Broadcast Word
2138 * PMFHI rd Parallel Move From HI Register
2139 * PMFLO rd Parallel Move From LO Register
2140 * PMTHI rs Parallel Move To HI Register
2141 * PMTLO rs Parallel Move To LO Register
2142 * PMFHL rd Parallel Move From HI/LO Register
2143 * PMTHL rs Parallel Move To HI/LO Register
2145 * Pack/Extend (11 instructions)
2146 * -----------------------------
2147 * PPAC5 rd, rt Parallel Pack to 5 bits
2148 * PPACB rd, rs, rt Parallel Pack to Byte
2149 * PPACH rd, rs, rt Parallel Pack to Halfword
2150 * PPACW rd, rs, rt Parallel Pack to Word
2151 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
2152 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
2153 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
2154 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
2155 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
2156 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
2157 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
2159 * Others (16 instructions)
2160 * ------------------------
2161 * PCPYH rd, rt Parallel Copy Halfword
2162 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
2163 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
2164 * PREVH rd, rt Parallel Reverse Halfword
2165 * PINTH rd, rs, rt Parallel Interleave Halfword
2166 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
2167 * PEXEH rd, rt Parallel Exchange Even Halfword
2168 * PEXCH rd, rt Parallel Exchange Center Halfword
2169 * PEXEW rd, rt Parallel Exchange Even Word
2170 * PEXCW rd, rt Parallel Exchange Center Word
2171 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
2172 * MFSA rd Move from Shift Amount Register
2173 * MTSA rs Move to Shift Amount Register
2174 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
2175 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
2176 * PROT3W rd, rt Parallel Rotate 3 Words
2178 * MMI (MultiMedia Instruction) encodings
2179 * ======================================
2181 * MMI instructions encoding table keys:
2183 * * This code is reserved for future use. An attempt to execute it
2184 * causes a Reserved Instruction exception.
2185 * % This code indicates an instruction class. The instruction word
2186 * must be further decoded by examining additional tables that show
2187 * the values for other instruction fields.
2188 * # This code is reserved for the unsupported instructions DMULT,
2189 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
2190 * to execute it causes a Reserved Instruction exception.
2192 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
2195 * +--------+----------------------------------------+
2197 * +--------+----------------------------------------+
2199 * opcode bits 28..26
2200 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2201 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2202 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2203 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
2204 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
2205 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
2206 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
2207 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
2208 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
2209 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
2210 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
2214 MMI_OPC_CLASS_MMI
= 0x1C << 26, /* Same as OPC_SPECIAL2 */
2215 MMI_OPC_LQ
= 0x1E << 26, /* Same as OPC_MSA */
2216 MMI_OPC_SQ
= 0x1F << 26, /* Same as OPC_SPECIAL3 */
2220 * MMI instructions with opcode field = MMI:
2223 * +--------+-------------------------------+--------+
2224 * | MMI | |function|
2225 * +--------+-------------------------------+--------+
2227 * function bits 2..0
2228 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
2229 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
2230 * -------+-------+-------+-------+-------+-------+-------+-------+-------
2231 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
2232 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
2233 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
2234 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
2235 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
2236 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
2237 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
2238 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
2241 #define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
2243 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
2244 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
2245 MMI_OPC_PLZCW
= 0x04 | MMI_OPC_CLASS_MMI
,
2246 MMI_OPC_CLASS_MMI0
= 0x08 | MMI_OPC_CLASS_MMI
,
2247 MMI_OPC_CLASS_MMI2
= 0x09 | MMI_OPC_CLASS_MMI
,
2248 MMI_OPC_MFHI1
= 0x10 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFHI */
2249 MMI_OPC_MTHI1
= 0x11 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTHI */
2250 MMI_OPC_MFLO1
= 0x12 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFLO */
2251 MMI_OPC_MTLO1
= 0x13 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTLO */
2252 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
2253 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
2254 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
2255 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
2256 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
2257 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
2258 MMI_OPC_CLASS_MMI1
= 0x28 | MMI_OPC_CLASS_MMI
,
2259 MMI_OPC_CLASS_MMI3
= 0x29 | MMI_OPC_CLASS_MMI
,
2260 MMI_OPC_PMFHL
= 0x30 | MMI_OPC_CLASS_MMI
,
2261 MMI_OPC_PMTHL
= 0x31 | MMI_OPC_CLASS_MMI
,
2262 MMI_OPC_PSLLH
= 0x34 | MMI_OPC_CLASS_MMI
,
2263 MMI_OPC_PSRLH
= 0x36 | MMI_OPC_CLASS_MMI
,
2264 MMI_OPC_PSRAH
= 0x37 | MMI_OPC_CLASS_MMI
,
2265 MMI_OPC_PSLLW
= 0x3C | MMI_OPC_CLASS_MMI
,
2266 MMI_OPC_PSRLW
= 0x3E | MMI_OPC_CLASS_MMI
,
2267 MMI_OPC_PSRAW
= 0x3F | MMI_OPC_CLASS_MMI
,
2271 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
2274 * +--------+----------------------+--------+--------+
2275 * | MMI | |function| MMI0 |
2276 * +--------+----------------------+--------+--------+
2278 * function bits 7..6
2279 * bits | 0 | 1 | 2 | 3
2280 * 10..8 | 00 | 01 | 10 | 11
2281 * -------+-------+-------+-------+-------
2282 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
2283 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
2284 * 2 010 | PADDB | PSUBB | PCGTB | *
2285 * 3 011 | * | * | * | *
2286 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
2287 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
2288 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
2289 * 7 111 | * | * | PEXT5 | PPAC5
2292 #define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2294 MMI_OPC_0_PADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI0
,
2295 MMI_OPC_0_PSUBW
= (0x01 << 6) | MMI_OPC_CLASS_MMI0
,
2296 MMI_OPC_0_PCGTW
= (0x02 << 6) | MMI_OPC_CLASS_MMI0
,
2297 MMI_OPC_0_PMAXW
= (0x03 << 6) | MMI_OPC_CLASS_MMI0
,
2298 MMI_OPC_0_PADDH
= (0x04 << 6) | MMI_OPC_CLASS_MMI0
,
2299 MMI_OPC_0_PSUBH
= (0x05 << 6) | MMI_OPC_CLASS_MMI0
,
2300 MMI_OPC_0_PCGTH
= (0x06 << 6) | MMI_OPC_CLASS_MMI0
,
2301 MMI_OPC_0_PMAXH
= (0x07 << 6) | MMI_OPC_CLASS_MMI0
,
2302 MMI_OPC_0_PADDB
= (0x08 << 6) | MMI_OPC_CLASS_MMI0
,
2303 MMI_OPC_0_PSUBB
= (0x09 << 6) | MMI_OPC_CLASS_MMI0
,
2304 MMI_OPC_0_PCGTB
= (0x0A << 6) | MMI_OPC_CLASS_MMI0
,
2305 MMI_OPC_0_PADDSW
= (0x10 << 6) | MMI_OPC_CLASS_MMI0
,
2306 MMI_OPC_0_PSUBSW
= (0x11 << 6) | MMI_OPC_CLASS_MMI0
,
2307 MMI_OPC_0_PEXTLW
= (0x12 << 6) | MMI_OPC_CLASS_MMI0
,
2308 MMI_OPC_0_PPACW
= (0x13 << 6) | MMI_OPC_CLASS_MMI0
,
2309 MMI_OPC_0_PADDSH
= (0x14 << 6) | MMI_OPC_CLASS_MMI0
,
2310 MMI_OPC_0_PSUBSH
= (0x15 << 6) | MMI_OPC_CLASS_MMI0
,
2311 MMI_OPC_0_PEXTLH
= (0x16 << 6) | MMI_OPC_CLASS_MMI0
,
2312 MMI_OPC_0_PPACH
= (0x17 << 6) | MMI_OPC_CLASS_MMI0
,
2313 MMI_OPC_0_PADDSB
= (0x18 << 6) | MMI_OPC_CLASS_MMI0
,
2314 MMI_OPC_0_PSUBSB
= (0x19 << 6) | MMI_OPC_CLASS_MMI0
,
2315 MMI_OPC_0_PEXTLB
= (0x1A << 6) | MMI_OPC_CLASS_MMI0
,
2316 MMI_OPC_0_PPACB
= (0x1B << 6) | MMI_OPC_CLASS_MMI0
,
2317 MMI_OPC_0_PEXT5
= (0x1E << 6) | MMI_OPC_CLASS_MMI0
,
2318 MMI_OPC_0_PPAC5
= (0x1F << 6) | MMI_OPC_CLASS_MMI0
,
2322 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
2325 * +--------+----------------------+--------+--------+
2326 * | MMI | |function| MMI1 |
2327 * +--------+----------------------+--------+--------+
2329 * function bits 7..6
2330 * bits | 0 | 1 | 2 | 3
2331 * 10..8 | 00 | 01 | 10 | 11
2332 * -------+-------+-------+-------+-------
2333 * 0 000 | * | PABSW | PCEQW | PMINW
2334 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
2335 * 2 010 | * | * | PCEQB | *
2336 * 3 011 | * | * | * | *
2337 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
2338 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
2339 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
2340 * 7 111 | * | * | * | *
2343 #define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2345 MMI_OPC_1_PABSW
= (0x01 << 6) | MMI_OPC_CLASS_MMI1
,
2346 MMI_OPC_1_PCEQW
= (0x02 << 6) | MMI_OPC_CLASS_MMI1
,
2347 MMI_OPC_1_PMINW
= (0x03 << 6) | MMI_OPC_CLASS_MMI1
,
2348 MMI_OPC_1_PADSBH
= (0x04 << 6) | MMI_OPC_CLASS_MMI1
,
2349 MMI_OPC_1_PABSH
= (0x05 << 6) | MMI_OPC_CLASS_MMI1
,
2350 MMI_OPC_1_PCEQH
= (0x06 << 6) | MMI_OPC_CLASS_MMI1
,
2351 MMI_OPC_1_PMINH
= (0x07 << 6) | MMI_OPC_CLASS_MMI1
,
2352 MMI_OPC_1_PCEQB
= (0x0A << 6) | MMI_OPC_CLASS_MMI1
,
2353 MMI_OPC_1_PADDUW
= (0x10 << 6) | MMI_OPC_CLASS_MMI1
,
2354 MMI_OPC_1_PSUBUW
= (0x11 << 6) | MMI_OPC_CLASS_MMI1
,
2355 MMI_OPC_1_PEXTUW
= (0x12 << 6) | MMI_OPC_CLASS_MMI1
,
2356 MMI_OPC_1_PADDUH
= (0x14 << 6) | MMI_OPC_CLASS_MMI1
,
2357 MMI_OPC_1_PSUBUH
= (0x15 << 6) | MMI_OPC_CLASS_MMI1
,
2358 MMI_OPC_1_PEXTUH
= (0x16 << 6) | MMI_OPC_CLASS_MMI1
,
2359 MMI_OPC_1_PADDUB
= (0x18 << 6) | MMI_OPC_CLASS_MMI1
,
2360 MMI_OPC_1_PSUBUB
= (0x19 << 6) | MMI_OPC_CLASS_MMI1
,
2361 MMI_OPC_1_PEXTUB
= (0x1A << 6) | MMI_OPC_CLASS_MMI1
,
2362 MMI_OPC_1_QFSRV
= (0x1B << 6) | MMI_OPC_CLASS_MMI1
,
2366 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
2369 * +--------+----------------------+--------+--------+
2370 * | MMI | |function| MMI2 |
2371 * +--------+----------------------+--------+--------+
2373 * function bits 7..6
2374 * bits | 0 | 1 | 2 | 3
2375 * 10..8 | 00 | 01 | 10 | 11
2376 * -------+-------+-------+-------+-------
2377 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
2378 * 1 001 | PMSUBW| * | * | *
2379 * 2 010 | PMFHI | PMFLO | PINTH | *
2380 * 3 011 | PMULTW| PDIVW | PCPYLD| *
2381 * 4 100 | PMADDH| PHMADH| PAND | PXOR
2382 * 5 101 | PMSUBH| PHMSBH| * | *
2383 * 6 110 | * | * | PEXEH | PREVH
2384 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
2387 #define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2389 MMI_OPC_2_PMADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI2
,
2390 MMI_OPC_2_PSLLVW
= (0x02 << 6) | MMI_OPC_CLASS_MMI2
,
2391 MMI_OPC_2_PSRLVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI2
,
2392 MMI_OPC_2_PMSUBW
= (0x04 << 6) | MMI_OPC_CLASS_MMI2
,
2393 MMI_OPC_2_PMFHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI2
,
2394 MMI_OPC_2_PMFLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI2
,
2395 MMI_OPC_2_PINTH
= (0x0A << 6) | MMI_OPC_CLASS_MMI2
,
2396 MMI_OPC_2_PMULTW
= (0x0C << 6) | MMI_OPC_CLASS_MMI2
,
2397 MMI_OPC_2_PDIVW
= (0x0D << 6) | MMI_OPC_CLASS_MMI2
,
2398 MMI_OPC_2_PCPYLD
= (0x0E << 6) | MMI_OPC_CLASS_MMI2
,
2399 MMI_OPC_2_PMADDH
= (0x10 << 6) | MMI_OPC_CLASS_MMI2
,
2400 MMI_OPC_2_PHMADH
= (0x11 << 6) | MMI_OPC_CLASS_MMI2
,
2401 MMI_OPC_2_PAND
= (0x12 << 6) | MMI_OPC_CLASS_MMI2
,
2402 MMI_OPC_2_PXOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI2
,
2403 MMI_OPC_2_PMSUBH
= (0x14 << 6) | MMI_OPC_CLASS_MMI2
,
2404 MMI_OPC_2_PHMSBH
= (0x15 << 6) | MMI_OPC_CLASS_MMI2
,
2405 MMI_OPC_2_PEXEH
= (0x1A << 6) | MMI_OPC_CLASS_MMI2
,
2406 MMI_OPC_2_PREVH
= (0x1B << 6) | MMI_OPC_CLASS_MMI2
,
2407 MMI_OPC_2_PMULTH
= (0x1C << 6) | MMI_OPC_CLASS_MMI2
,
2408 MMI_OPC_2_PDIVBW
= (0x1D << 6) | MMI_OPC_CLASS_MMI2
,
2409 MMI_OPC_2_PEXEW
= (0x1E << 6) | MMI_OPC_CLASS_MMI2
,
2410 MMI_OPC_2_PROT3W
= (0x1F << 6) | MMI_OPC_CLASS_MMI2
,
2414 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
2417 * +--------+----------------------+--------+--------+
2418 * | MMI | |function| MMI3 |
2419 * +--------+----------------------+--------+--------+
2421 * function bits 7..6
2422 * bits | 0 | 1 | 2 | 3
2423 * 10..8 | 00 | 01 | 10 | 11
2424 * -------+-------+-------+-------+-------
2425 * 0 000 |PMADDUW| * | * | PSRAVW
2426 * 1 001 | * | * | * | *
2427 * 2 010 | PMTHI | PMTLO | PINTEH| *
2428 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
2429 * 4 100 | * | * | POR | PNOR
2430 * 5 101 | * | * | * | *
2431 * 6 110 | * | * | PEXCH | PCPYH
2432 * 7 111 | * | * | PEXCW | *
2435 #define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
2437 MMI_OPC_3_PMADDUW
= (0x00 << 6) | MMI_OPC_CLASS_MMI3
,
2438 MMI_OPC_3_PSRAVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI3
,
2439 MMI_OPC_3_PMTHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI3
,
2440 MMI_OPC_3_PMTLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI3
,
2441 MMI_OPC_3_PINTEH
= (0x0A << 6) | MMI_OPC_CLASS_MMI3
,
2442 MMI_OPC_3_PMULTUW
= (0x0C << 6) | MMI_OPC_CLASS_MMI3
,
2443 MMI_OPC_3_PDIVUW
= (0x0D << 6) | MMI_OPC_CLASS_MMI3
,
2444 MMI_OPC_3_PCPYUD
= (0x0E << 6) | MMI_OPC_CLASS_MMI3
,
2445 MMI_OPC_3_POR
= (0x12 << 6) | MMI_OPC_CLASS_MMI3
,
2446 MMI_OPC_3_PNOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI3
,
2447 MMI_OPC_3_PEXCH
= (0x1A << 6) | MMI_OPC_CLASS_MMI3
,
2448 MMI_OPC_3_PCPYH
= (0x1B << 6) | MMI_OPC_CLASS_MMI3
,
2449 MMI_OPC_3_PEXCW
= (0x1E << 6) | MMI_OPC_CLASS_MMI3
,
2452 /* global register indices */
2453 static TCGv cpu_gpr
[32], cpu_PC
;
2454 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
2455 static TCGv cpu_dspctrl
, btarget
, bcond
;
2456 static TCGv cpu_lladdr
, cpu_llval
;
2457 static TCGv_i32 hflags
;
2458 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
2459 static TCGv_i64 fpu_f64
[32];
2460 static TCGv_i64 msa_wr_d
[64];
2462 #if defined(TARGET_MIPS64)
2463 /* Upper halves of R5900's 128-bit registers: MMRs (multimedia registers) */
2464 static TCGv_i64 cpu_mmr
[32];
2467 #if !defined(TARGET_MIPS64)
2469 static TCGv mxu_gpr
[NUMBER_OF_MXU_REGISTERS
- 1];
2473 #include "exec/gen-icount.h"
2475 #define gen_helper_0e0i(name, arg) do { \
2476 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
2477 gen_helper_##name(cpu_env, helper_tmp); \
2478 tcg_temp_free_i32(helper_tmp); \
2481 #define gen_helper_0e1i(name, arg1, arg2) do { \
2482 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
2483 gen_helper_##name(cpu_env, arg1, helper_tmp); \
2484 tcg_temp_free_i32(helper_tmp); \
2487 #define gen_helper_1e0i(name, ret, arg1) do { \
2488 TCGv_i32 helper_tmp = tcg_const_i32(arg1); \
2489 gen_helper_##name(ret, cpu_env, helper_tmp); \
2490 tcg_temp_free_i32(helper_tmp); \
2493 #define gen_helper_1e1i(name, ret, arg1, arg2) do { \
2494 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
2495 gen_helper_##name(ret, cpu_env, arg1, helper_tmp); \
2496 tcg_temp_free_i32(helper_tmp); \
2499 #define gen_helper_0e2i(name, arg1, arg2, arg3) do { \
2500 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
2501 gen_helper_##name(cpu_env, arg1, arg2, helper_tmp); \
2502 tcg_temp_free_i32(helper_tmp); \
2505 #define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do { \
2506 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
2507 gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp); \
2508 tcg_temp_free_i32(helper_tmp); \
2511 #define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do { \
2512 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
2513 gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp); \
2514 tcg_temp_free_i32(helper_tmp); \
2517 typedef struct DisasContext
{
2518 DisasContextBase base
;
2519 target_ulong saved_pc
;
2520 target_ulong page_start
;
2522 uint64_t insn_flags
;
2523 int32_t CP0_Config1
;
2524 int32_t CP0_Config2
;
2525 int32_t CP0_Config3
;
2526 int32_t CP0_Config5
;
2527 /* Routine used to access memory */
2529 TCGMemOp default_tcg_memop_mask
;
2530 uint32_t hflags
, saved_hflags
;
2531 target_ulong btarget
;
2542 int CP0_LLAddr_shift
;
2552 #define DISAS_STOP DISAS_TARGET_0
2553 #define DISAS_EXIT DISAS_TARGET_1
2555 static const char * const regnames
[] = {
2556 "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
2557 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
2558 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
2559 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
2562 static const char * const regnames_HI
[] = {
2563 "HI0", "HI1", "HI2", "HI3",
2566 static const char * const regnames_LO
[] = {
2567 "LO0", "LO1", "LO2", "LO3",
2570 static const char * const fregnames
[] = {
2571 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
2572 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
2573 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
2574 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
2577 static const char * const msaregnames
[] = {
2578 "w0.d0", "w0.d1", "w1.d0", "w1.d1",
2579 "w2.d0", "w2.d1", "w3.d0", "w3.d1",
2580 "w4.d0", "w4.d1", "w5.d0", "w5.d1",
2581 "w6.d0", "w6.d1", "w7.d0", "w7.d1",
2582 "w8.d0", "w8.d1", "w9.d0", "w9.d1",
2583 "w10.d0", "w10.d1", "w11.d0", "w11.d1",
2584 "w12.d0", "w12.d1", "w13.d0", "w13.d1",
2585 "w14.d0", "w14.d1", "w15.d0", "w15.d1",
2586 "w16.d0", "w16.d1", "w17.d0", "w17.d1",
2587 "w18.d0", "w18.d1", "w19.d0", "w19.d1",
2588 "w20.d0", "w20.d1", "w21.d0", "w21.d1",
2589 "w22.d0", "w22.d1", "w23.d0", "w23.d1",
2590 "w24.d0", "w24.d1", "w25.d0", "w25.d1",
2591 "w26.d0", "w26.d1", "w27.d0", "w27.d1",
2592 "w28.d0", "w28.d1", "w29.d0", "w29.d1",
2593 "w30.d0", "w30.d1", "w31.d0", "w31.d1",
2596 #if !defined(TARGET_MIPS64)
2597 static const char * const mxuregnames
[] = {
2598 "XR1", "XR2", "XR3", "XR4", "XR5", "XR6", "XR7", "XR8",
2599 "XR9", "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
2603 #define LOG_DISAS(...) \
2605 if (MIPS_DEBUG_DISAS) { \
2606 qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__); \
2610 #define MIPS_INVAL(op) \
2612 if (MIPS_DEBUG_DISAS) { \
2613 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
2614 TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
2615 ctx->base.pc_next, ctx->opcode, op, \
2616 ctx->opcode >> 26, ctx->opcode & 0x3F, \
2617 ((ctx->opcode >> 16) & 0x1F)); \
2621 /* General purpose registers moves. */
2622 static inline void gen_load_gpr(TCGv t
, int reg
)
2625 tcg_gen_movi_tl(t
, 0);
2627 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
2631 static inline void gen_store_gpr(TCGv t
, int reg
)
2634 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
2638 /* Moves to/from shadow registers. */
2639 static inline void gen_load_srsgpr(int from
, int to
)
2641 TCGv t0
= tcg_temp_new();
2644 tcg_gen_movi_tl(t0
, 0);
2646 TCGv_i32 t2
= tcg_temp_new_i32();
2647 TCGv_ptr addr
= tcg_temp_new_ptr();
2649 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2650 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2651 tcg_gen_andi_i32(t2
, t2
, 0xf);
2652 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2653 tcg_gen_ext_i32_ptr(addr
, t2
);
2654 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2656 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
2657 tcg_temp_free_ptr(addr
);
2658 tcg_temp_free_i32(t2
);
2660 gen_store_gpr(t0
, to
);
2664 static inline void gen_store_srsgpr (int from
, int to
)
2667 TCGv t0
= tcg_temp_new();
2668 TCGv_i32 t2
= tcg_temp_new_i32();
2669 TCGv_ptr addr
= tcg_temp_new_ptr();
2671 gen_load_gpr(t0
, from
);
2672 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
2673 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
2674 tcg_gen_andi_i32(t2
, t2
, 0xf);
2675 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
2676 tcg_gen_ext_i32_ptr(addr
, t2
);
2677 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
2679 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
2680 tcg_temp_free_ptr(addr
);
2681 tcg_temp_free_i32(t2
);
2686 #if !defined(TARGET_MIPS64)
2687 /* MXU General purpose registers moves. */
2688 static inline void gen_load_mxu_gpr(TCGv t
, unsigned int reg
)
2691 tcg_gen_movi_tl(t
, 0);
2692 } else if (reg
<= 15) {
2693 tcg_gen_mov_tl(t
, mxu_gpr
[reg
- 1]);
2697 static inline void gen_store_mxu_gpr(TCGv t
, unsigned int reg
)
2699 if (reg
> 0 && reg
<= 15) {
2700 tcg_gen_mov_tl(mxu_gpr
[reg
- 1], t
);
2704 /* MXU control register moves. */
2705 static inline void gen_load_mxu_cr(TCGv t
)
2707 tcg_gen_mov_tl(t
, mxu_CR
);
2710 static inline void gen_store_mxu_cr(TCGv t
)
2712 /* TODO: Add handling of RW rules for MXU_CR. */
2713 tcg_gen_mov_tl(mxu_CR
, t
);
2719 static inline void gen_save_pc(target_ulong pc
)
2721 tcg_gen_movi_tl(cpu_PC
, pc
);
2724 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
2726 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
2727 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
2728 gen_save_pc(ctx
->base
.pc_next
);
2729 ctx
->saved_pc
= ctx
->base
.pc_next
;
2731 if (ctx
->hflags
!= ctx
->saved_hflags
) {
2732 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
2733 ctx
->saved_hflags
= ctx
->hflags
;
2734 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2740 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
2746 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
2748 ctx
->saved_hflags
= ctx
->hflags
;
2749 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
2755 ctx
->btarget
= env
->btarget
;
2760 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
2762 TCGv_i32 texcp
= tcg_const_i32(excp
);
2763 TCGv_i32 terr
= tcg_const_i32(err
);
2764 save_cpu_state(ctx
, 1);
2765 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
2766 tcg_temp_free_i32(terr
);
2767 tcg_temp_free_i32(texcp
);
2768 ctx
->base
.is_jmp
= DISAS_NORETURN
;
2771 static inline void generate_exception(DisasContext
*ctx
, int excp
)
2773 gen_helper_0e0i(raise_exception
, excp
);
2776 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
2778 generate_exception_err(ctx
, excp
, 0);
2781 /* Floating point register moves. */
2782 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2784 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2785 generate_exception(ctx
, EXCP_RI
);
2787 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
2790 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2793 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
2794 generate_exception(ctx
, EXCP_RI
);
2796 t64
= tcg_temp_new_i64();
2797 tcg_gen_extu_i32_i64(t64
, t
);
2798 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
2799 tcg_temp_free_i64(t64
);
2802 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2804 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2805 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
2807 gen_load_fpr32(ctx
, t
, reg
| 1);
2811 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
2813 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2814 TCGv_i64 t64
= tcg_temp_new_i64();
2815 tcg_gen_extu_i32_i64(t64
, t
);
2816 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
2817 tcg_temp_free_i64(t64
);
2819 gen_store_fpr32(ctx
, t
, reg
| 1);
2823 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2825 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2826 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
2828 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
2832 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
2834 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
2835 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
2838 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
2839 t0
= tcg_temp_new_i64();
2840 tcg_gen_shri_i64(t0
, t
, 32);
2841 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
2842 tcg_temp_free_i64(t0
);
/*
 * Map FP condition code 'cc' to its bit position in FCSR:
 * cc 0 -> bit 23, cc 1..7 -> bits 25..31.
 */
static inline int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
2855 /* Addresses computation */
2856 static inline void gen_op_addr_add(DisasContext
*ctx
, TCGv ret
, TCGv arg0
,
2859 tcg_gen_add_tl(ret
, arg0
, arg1
);
2861 #if defined(TARGET_MIPS64)
2862 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2863 tcg_gen_ext32s_i64(ret
, ret
);
2868 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
2871 tcg_gen_addi_tl(ret
, base
, ofs
);
2873 #if defined(TARGET_MIPS64)
2874 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2875 tcg_gen_ext32s_i64(ret
, ret
);
2880 /* Addresses computation (translation time) */
2881 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
2884 target_long sum
= base
+ offset
;
2886 #if defined(TARGET_MIPS64)
2887 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
2894 /* Sign-extract the low 32-bits to a target_long. */
2895 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
2897 #if defined(TARGET_MIPS64)
2898 tcg_gen_ext32s_i64(ret
, arg
);
2900 tcg_gen_extrl_i64_i32(ret
, arg
);
2904 /* Sign-extract the high 32-bits to a target_long. */
2905 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
2907 #if defined(TARGET_MIPS64)
2908 tcg_gen_sari_i64(ret
, arg
, 32);
2910 tcg_gen_extrh_i64_i32(ret
, arg
);
2914 static inline void check_cp0_enabled(DisasContext
*ctx
)
2916 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
2917 generate_exception_err(ctx
, EXCP_CpU
, 0);
2921 static inline void check_cp1_enabled(DisasContext
*ctx
)
2923 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
))) {
2924 generate_exception_err(ctx
, EXCP_CpU
, 1);
2929 * Verify that the processor is running with COP1X instructions enabled.
2930 * This is associated with the nabla symbol in the MIPS32 and MIPS64
2933 static inline void check_cop1x(DisasContext
*ctx
)
2935 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
))) {
2936 generate_exception_end(ctx
, EXCP_RI
);
2941 * Verify that the processor is running with 64-bit floating-point
2942 * operations enabled.
2944 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
2946 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
))) {
2947 generate_exception_end(ctx
, EXCP_RI
);
2952 * Verify if floating point register is valid; an operation is not defined
2953 * if bit 0 of any register specification is set and the FR bit in the
2954 * Status register equals zero, since the register numbers specify an
2955 * even-odd pair of adjacent coprocessor general registers. When the FR bit
2956 * in the Status register equals one, both even and odd register numbers
2957 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
2959 * Multiple 64 bit wide registers can be checked by calling
2960 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
2962 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
2964 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1))) {
2965 generate_exception_end(ctx
, EXCP_RI
);
2970 * Verify that the processor is running with DSP instructions enabled.
2971 * This is enabled by CP0 Status register MX(24) bit.
2973 static inline void check_dsp(DisasContext
*ctx
)
2975 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
2976 if (ctx
->insn_flags
& ASE_DSP
) {
2977 generate_exception_end(ctx
, EXCP_DSPDIS
);
2979 generate_exception_end(ctx
, EXCP_RI
);
2984 static inline void check_dsp_r2(DisasContext
*ctx
)
2986 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
2987 if (ctx
->insn_flags
& ASE_DSP
) {
2988 generate_exception_end(ctx
, EXCP_DSPDIS
);
2990 generate_exception_end(ctx
, EXCP_RI
);
2995 static inline void check_dsp_r3(DisasContext
*ctx
)
2997 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
2998 if (ctx
->insn_flags
& ASE_DSP
) {
2999 generate_exception_end(ctx
, EXCP_DSPDIS
);
3001 generate_exception_end(ctx
, EXCP_RI
);
3007 * This code generates a "reserved instruction" exception if the
3008 * CPU does not support the instruction set corresponding to flags.
3010 static inline void check_insn(DisasContext
*ctx
, uint64_t flags
)
3012 if (unlikely(!(ctx
->insn_flags
& flags
))) {
3013 generate_exception_end(ctx
, EXCP_RI
);
3018 * This code generates a "reserved instruction" exception if the
3019 * CPU has corresponding flag set which indicates that the instruction
3022 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
3024 if (unlikely(ctx
->insn_flags
& flags
)) {
3025 generate_exception_end(ctx
, EXCP_RI
);
3030 * The Linux kernel traps certain reserved instruction exceptions to
3031 * emulate the corresponding instructions. QEMU is the kernel in user
3032 * mode, so those traps are emulated by accepting the instructions.
3034 * A reserved instruction exception is generated for flagged CPUs if
3035 * QEMU runs in system mode.
3037 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
3039 #ifndef CONFIG_USER_ONLY
3040 check_insn_opc_removed(ctx
, flags
);
3045 * This code generates a "reserved instruction" exception if the
3046 * CPU does not support 64-bit paired-single (PS) floating point data type.
3048 static inline void check_ps(DisasContext
*ctx
)
3050 if (unlikely(!ctx
->ps
)) {
3051 generate_exception(ctx
, EXCP_RI
);
3053 check_cp1_64bitmode(ctx
);
#ifdef TARGET_MIPS64
/*
 * This code generates a "reserved instruction" exception if 64-bit
 * instructions are not enabled.
 */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}
#endif
3069 #ifndef CONFIG_USER_ONLY
3070 static inline void check_mvh(DisasContext
*ctx
)
3072 if (unlikely(!ctx
->mvh
)) {
3073 generate_exception(ctx
, EXCP_RI
);
3079 * This code generates a "reserved instruction" exception if the
3080 * Config5 XNP bit is set.
3082 static inline void check_xnp(DisasContext
*ctx
)
3084 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
3085 generate_exception_end(ctx
, EXCP_RI
);
3089 #ifndef CONFIG_USER_ONLY
3091 * This code generates a "reserved instruction" exception if the
3092 * Config3 PW bit is NOT set.
3094 static inline void check_pw(DisasContext
*ctx
)
3096 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
3097 generate_exception_end(ctx
, EXCP_RI
);
3103 * This code generates a "reserved instruction" exception if the
3104 * Config3 MT bit is NOT set.
3106 static inline void check_mt(DisasContext
*ctx
)
3108 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3109 generate_exception_end(ctx
, EXCP_RI
);
3113 #ifndef CONFIG_USER_ONLY
3115 * This code generates a "coprocessor unusable" exception if CP0 is not
3116 * available, and, if that is not the case, generates a "reserved instruction"
3117 * exception if the Config5 MT bit is NOT set. This is needed for availability
3118 * control of some of MT ASE instructions.
3120 static inline void check_cp0_mt(DisasContext
*ctx
)
3122 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
3123 generate_exception_err(ctx
, EXCP_CpU
, 0);
3125 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
3126 generate_exception_err(ctx
, EXCP_RI
, 0);
3133 * This code generates a "reserved instruction" exception if the
3134 * Config5 NMS bit is set.
3136 static inline void check_nms(DisasContext
*ctx
)
3138 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
3139 generate_exception_end(ctx
, EXCP_RI
);
3144 * This code generates a "reserved instruction" exception if the
3145 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
3146 * Config2 TL, and Config5 L2C are unset.
3148 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
3150 if (unlikely((ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
3151 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
3152 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
3153 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
3154 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
3155 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))) {
3156 generate_exception_end(ctx
, EXCP_RI
);
3161 * This code generates a "reserved instruction" exception if the
3162 * Config5 EVA bit is NOT set.
3164 static inline void check_eva(DisasContext
*ctx
)
3166 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
3167 generate_exception_end(ctx
, EXCP_RI
);
3173 * Define small wrappers for gen_load_fpr* so that we have a uniform
3174 * calling interface for 32 and 64-bit FPRs. No sense in changing
3175 * all callers for gen_load_fpr32 when we need the CTX parameter for
3178 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
3179 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
3180 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
3181 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
3182 int ft, int fs, int cc) \
3184 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
3185 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
3194 check_cp1_registers(ctx, fs | ft); \
3202 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
3203 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
3206 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); \
3209 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); \
3212 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); \
3215 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); \
3218 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); \
3221 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); \
3224 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); \
3227 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); \
3230 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); \
3233 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); \
3236 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); \
3239 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); \
3242 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); \
3245 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); \
3248 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); \
3251 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); \
3256 tcg_temp_free_i##bits (fp0); \
3257 tcg_temp_free_i##bits (fp1); \
3260 FOP_CONDS(, 0, d
, FMT_D
, 64)
3261 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
3262 FOP_CONDS(, 0, s
, FMT_S
, 32)
3263 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
3264 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
3265 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
3268 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
3269 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
3270 int ft, int fs, int fd) \
3272 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
3273 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
3274 if (ifmt == FMT_D) { \
3275 check_cp1_registers(ctx, fs | ft | fd); \
3277 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
3278 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
3281 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
3284 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
3287 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
3290 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
3293 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
3296 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
3299 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
3302 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
3305 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
3308 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
3311 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
3314 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
3317 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
3320 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
3323 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
3326 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
3329 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
3332 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
3335 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
3338 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
3341 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
3344 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
3350 tcg_temp_free_i ## bits (fp0); \
3351 tcg_temp_free_i ## bits (fp1); \
3354 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
3355 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
3357 #undef gen_ldcmp_fpr32
3358 #undef gen_ldcmp_fpr64
3360 /* load/store instructions. */
3361 #ifdef CONFIG_USER_ONLY
3362 #define OP_LD_ATOMIC(insn,fname) \
3363 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3364 DisasContext *ctx) \
3366 TCGv t0 = tcg_temp_new(); \
3367 tcg_gen_mov_tl(t0, arg1); \
3368 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
3369 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
3370 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
3371 tcg_temp_free(t0); \
3374 #define OP_LD_ATOMIC(insn,fname) \
3375 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
3376 DisasContext *ctx) \
3378 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
3381 OP_LD_ATOMIC(ll
,ld32s
);
3382 #if defined(TARGET_MIPS64)
3383 OP_LD_ATOMIC(lld
,ld64
);
3387 static void gen_base_offset_addr(DisasContext
*ctx
, TCGv addr
,
3388 int base
, int offset
)
3391 tcg_gen_movi_tl(addr
, offset
);
3392 } else if (offset
== 0) {
3393 gen_load_gpr(addr
, base
);
3395 tcg_gen_movi_tl(addr
, offset
);
3396 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
3400 static target_ulong
pc_relative_pc(DisasContext
*ctx
)
3402 target_ulong pc
= ctx
->base
.pc_next
;
3404 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
3405 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
3410 pc
&= ~(target_ulong
)3;
3415 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
3416 int rt
, int base
, int offset
)
3419 int mem_idx
= ctx
->mem_idx
;
3421 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
3423 * Loongson CPU uses a load to zero register for prefetch.
3424 * We emulate it as a NOP. On other CPU we must perform the
3425 * actual memory access.
3430 t0
= tcg_temp_new();
3431 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3434 #if defined(TARGET_MIPS64)
3436 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
3437 ctx
->default_tcg_memop_mask
);
3438 gen_store_gpr(t0
, rt
);
3441 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
3442 ctx
->default_tcg_memop_mask
);
3443 gen_store_gpr(t0
, rt
);
3447 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
3448 gen_store_gpr(t0
, rt
);
3451 t1
= tcg_temp_new();
3453 * Do a byte access to possibly trigger a page
3454 * fault with the unaligned address.
3456 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3457 tcg_gen_andi_tl(t1
, t0
, 7);
3458 #ifndef TARGET_WORDS_BIGENDIAN
3459 tcg_gen_xori_tl(t1
, t1
, 7);
3461 tcg_gen_shli_tl(t1
, t1
, 3);
3462 tcg_gen_andi_tl(t0
, t0
, ~7);
3463 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3464 tcg_gen_shl_tl(t0
, t0
, t1
);
3465 t2
= tcg_const_tl(-1);
3466 tcg_gen_shl_tl(t2
, t2
, t1
);
3467 gen_load_gpr(t1
, rt
);
3468 tcg_gen_andc_tl(t1
, t1
, t2
);
3470 tcg_gen_or_tl(t0
, t0
, t1
);
3472 gen_store_gpr(t0
, rt
);
3475 t1
= tcg_temp_new();
3477 * Do a byte access to possibly trigger a page
3478 * fault with the unaligned address.
3480 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3481 tcg_gen_andi_tl(t1
, t0
, 7);
3482 #ifdef TARGET_WORDS_BIGENDIAN
3483 tcg_gen_xori_tl(t1
, t1
, 7);
3485 tcg_gen_shli_tl(t1
, t1
, 3);
3486 tcg_gen_andi_tl(t0
, t0
, ~7);
3487 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3488 tcg_gen_shr_tl(t0
, t0
, t1
);
3489 tcg_gen_xori_tl(t1
, t1
, 63);
3490 t2
= tcg_const_tl(0xfffffffffffffffeull
);
3491 tcg_gen_shl_tl(t2
, t2
, t1
);
3492 gen_load_gpr(t1
, rt
);
3493 tcg_gen_and_tl(t1
, t1
, t2
);
3495 tcg_gen_or_tl(t0
, t0
, t1
);
3497 gen_store_gpr(t0
, rt
);
3500 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3501 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3503 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
3504 gen_store_gpr(t0
, rt
);
3508 t1
= tcg_const_tl(pc_relative_pc(ctx
));
3509 gen_op_addr_add(ctx
, t0
, t0
, t1
);
3511 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
3512 gen_store_gpr(t0
, rt
);
3515 mem_idx
= MIPS_HFLAG_UM
;
3518 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
3519 ctx
->default_tcg_memop_mask
);
3520 gen_store_gpr(t0
, rt
);
3523 mem_idx
= MIPS_HFLAG_UM
;
3526 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
3527 ctx
->default_tcg_memop_mask
);
3528 gen_store_gpr(t0
, rt
);
3531 mem_idx
= MIPS_HFLAG_UM
;
3534 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
3535 ctx
->default_tcg_memop_mask
);
3536 gen_store_gpr(t0
, rt
);
3539 mem_idx
= MIPS_HFLAG_UM
;
3542 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
3543 gen_store_gpr(t0
, rt
);
3546 mem_idx
= MIPS_HFLAG_UM
;
3549 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
3550 gen_store_gpr(t0
, rt
);
3553 mem_idx
= MIPS_HFLAG_UM
;
3556 t1
= tcg_temp_new();
3558 * Do a byte access to possibly trigger a page
3559 * fault with the unaligned address.
3561 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3562 tcg_gen_andi_tl(t1
, t0
, 3);
3563 #ifndef TARGET_WORDS_BIGENDIAN
3564 tcg_gen_xori_tl(t1
, t1
, 3);
3566 tcg_gen_shli_tl(t1
, t1
, 3);
3567 tcg_gen_andi_tl(t0
, t0
, ~3);
3568 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3569 tcg_gen_shl_tl(t0
, t0
, t1
);
3570 t2
= tcg_const_tl(-1);
3571 tcg_gen_shl_tl(t2
, t2
, t1
);
3572 gen_load_gpr(t1
, rt
);
3573 tcg_gen_andc_tl(t1
, t1
, t2
);
3575 tcg_gen_or_tl(t0
, t0
, t1
);
3577 tcg_gen_ext32s_tl(t0
, t0
);
3578 gen_store_gpr(t0
, rt
);
3581 mem_idx
= MIPS_HFLAG_UM
;
3584 t1
= tcg_temp_new();
3586 * Do a byte access to possibly trigger a page
3587 * fault with the unaligned address.
3589 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3590 tcg_gen_andi_tl(t1
, t0
, 3);
3591 #ifdef TARGET_WORDS_BIGENDIAN
3592 tcg_gen_xori_tl(t1
, t1
, 3);
3594 tcg_gen_shli_tl(t1
, t1
, 3);
3595 tcg_gen_andi_tl(t0
, t0
, ~3);
3596 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3597 tcg_gen_shr_tl(t0
, t0
, t1
);
3598 tcg_gen_xori_tl(t1
, t1
, 31);
3599 t2
= tcg_const_tl(0xfffffffeull
);
3600 tcg_gen_shl_tl(t2
, t2
, t1
);
3601 gen_load_gpr(t1
, rt
);
3602 tcg_gen_and_tl(t1
, t1
, t2
);
3604 tcg_gen_or_tl(t0
, t0
, t1
);
3606 tcg_gen_ext32s_tl(t0
, t0
);
3607 gen_store_gpr(t0
, rt
);
3610 mem_idx
= MIPS_HFLAG_UM
;
3614 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3615 gen_store_gpr(t0
, rt
);
3621 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3622 uint32_t reg1
, uint32_t reg2
)
3624 TCGv taddr
= tcg_temp_new();
3625 TCGv_i64 tval
= tcg_temp_new_i64();
3626 TCGv tmp1
= tcg_temp_new();
3627 TCGv tmp2
= tcg_temp_new();
3629 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3630 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3631 #ifdef TARGET_WORDS_BIGENDIAN
3632 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3634 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3636 gen_store_gpr(tmp1
, reg1
);
3637 tcg_temp_free(tmp1
);
3638 gen_store_gpr(tmp2
, reg2
);
3639 tcg_temp_free(tmp2
);
3640 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3641 tcg_temp_free_i64(tval
);
3642 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3643 tcg_temp_free(taddr
);
3647 static void gen_st(DisasContext
*ctx
, uint32_t opc
, int rt
,
3648 int base
, int offset
)
3650 TCGv t0
= tcg_temp_new();
3651 TCGv t1
= tcg_temp_new();
3652 int mem_idx
= ctx
->mem_idx
;
3654 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3655 gen_load_gpr(t1
, rt
);
3657 #if defined(TARGET_MIPS64)
3659 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3660 ctx
->default_tcg_memop_mask
);
3663 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3666 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3670 mem_idx
= MIPS_HFLAG_UM
;
3673 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3674 ctx
->default_tcg_memop_mask
);
3677 mem_idx
= MIPS_HFLAG_UM
;
3680 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3681 ctx
->default_tcg_memop_mask
);
3684 mem_idx
= MIPS_HFLAG_UM
;
3687 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3690 mem_idx
= MIPS_HFLAG_UM
;
3693 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3696 mem_idx
= MIPS_HFLAG_UM
;
3699 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3707 /* Store conditional */
3708 static void gen_st_cond(DisasContext
*ctx
, int rt
, int base
, int offset
,
3709 TCGMemOp tcg_mo
, bool eva
)
3712 TCGLabel
*l1
= gen_new_label();
3713 TCGLabel
*done
= gen_new_label();
3715 t0
= tcg_temp_new();
3716 addr
= tcg_temp_new();
3717 /* compare the address against that of the preceeding LL */
3718 gen_base_offset_addr(ctx
, addr
, base
, offset
);
3719 tcg_gen_brcond_tl(TCG_COND_EQ
, addr
, cpu_lladdr
, l1
);
3720 tcg_temp_free(addr
);
3721 tcg_gen_movi_tl(t0
, 0);
3722 gen_store_gpr(t0
, rt
);
3726 /* generate cmpxchg */
3727 val
= tcg_temp_new();
3728 gen_load_gpr(val
, rt
);
3729 tcg_gen_atomic_cmpxchg_tl(t0
, cpu_lladdr
, cpu_llval
, val
,
3730 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, tcg_mo
);
3731 tcg_gen_setcond_tl(TCG_COND_EQ
, t0
, t0
, cpu_llval
);
3732 gen_store_gpr(t0
, rt
);
3735 gen_set_label(done
);
3740 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3741 uint32_t reg1
, uint32_t reg2
, bool eva
)
3743 TCGv taddr
= tcg_temp_local_new();
3744 TCGv lladdr
= tcg_temp_local_new();
3745 TCGv_i64 tval
= tcg_temp_new_i64();
3746 TCGv_i64 llval
= tcg_temp_new_i64();
3747 TCGv_i64 val
= tcg_temp_new_i64();
3748 TCGv tmp1
= tcg_temp_new();
3749 TCGv tmp2
= tcg_temp_new();
3750 TCGLabel
*lab_fail
= gen_new_label();
3751 TCGLabel
*lab_done
= gen_new_label();
3753 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3755 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3756 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3758 gen_load_gpr(tmp1
, reg1
);
3759 gen_load_gpr(tmp2
, reg2
);
3761 #ifdef TARGET_WORDS_BIGENDIAN
3762 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3764 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3767 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3768 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3769 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, MO_64
);
3771 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3773 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3775 gen_set_label(lab_fail
);
3778 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3780 gen_set_label(lab_done
);
3781 tcg_gen_movi_tl(lladdr
, -1);
3782 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3785 /* Load and store */
3786 static void gen_flt_ldst(DisasContext
*ctx
, uint32_t opc
, int ft
,
3790 * Don't do NOP if destination is zero: we must perform the actual
3796 TCGv_i32 fp0
= tcg_temp_new_i32();
3797 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3798 ctx
->default_tcg_memop_mask
);
3799 gen_store_fpr32(ctx
, fp0
, ft
);
3800 tcg_temp_free_i32(fp0
);
3805 TCGv_i32 fp0
= tcg_temp_new_i32();
3806 gen_load_fpr32(ctx
, fp0
, ft
);
3807 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3808 ctx
->default_tcg_memop_mask
);
3809 tcg_temp_free_i32(fp0
);
3814 TCGv_i64 fp0
= tcg_temp_new_i64();
3815 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3816 ctx
->default_tcg_memop_mask
);
3817 gen_store_fpr64(ctx
, fp0
, ft
);
3818 tcg_temp_free_i64(fp0
);
3823 TCGv_i64 fp0
= tcg_temp_new_i64();
3824 gen_load_fpr64(ctx
, fp0
, ft
);
3825 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3826 ctx
->default_tcg_memop_mask
);
3827 tcg_temp_free_i64(fp0
);
3831 MIPS_INVAL("flt_ldst");
3832 generate_exception_end(ctx
, EXCP_RI
);
3837 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3838 int rs
, int16_t imm
)
3840 TCGv t0
= tcg_temp_new();
3842 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3843 check_cp1_enabled(ctx
);
3847 check_insn(ctx
, ISA_MIPS2
);
3850 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3851 gen_flt_ldst(ctx
, op
, rt
, t0
);
3854 generate_exception_err(ctx
, EXCP_CpU
, 1);
3859 /* Arithmetic with immediate operand */
3860 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3861 int rt
, int rs
, int imm
)
3863 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3865 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3867 * If no destination, treat it as a NOP.
3868 * For addi, we must generate the overflow exception when needed.
3875 TCGv t0
= tcg_temp_local_new();
3876 TCGv t1
= tcg_temp_new();
3877 TCGv t2
= tcg_temp_new();
3878 TCGLabel
*l1
= gen_new_label();
3880 gen_load_gpr(t1
, rs
);
3881 tcg_gen_addi_tl(t0
, t1
, uimm
);
3882 tcg_gen_ext32s_tl(t0
, t0
);
3884 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3885 tcg_gen_xori_tl(t2
, t0
, uimm
);
3886 tcg_gen_and_tl(t1
, t1
, t2
);
3888 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3890 /* operands of same sign, result different sign */
3891 generate_exception(ctx
, EXCP_OVERFLOW
);
3893 tcg_gen_ext32s_tl(t0
, t0
);
3894 gen_store_gpr(t0
, rt
);
3900 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3901 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3903 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3906 #if defined(TARGET_MIPS64)
3909 TCGv t0
= tcg_temp_local_new();
3910 TCGv t1
= tcg_temp_new();
3911 TCGv t2
= tcg_temp_new();
3912 TCGLabel
*l1
= gen_new_label();
3914 gen_load_gpr(t1
, rs
);
3915 tcg_gen_addi_tl(t0
, t1
, uimm
);
3917 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3918 tcg_gen_xori_tl(t2
, t0
, uimm
);
3919 tcg_gen_and_tl(t1
, t1
, t2
);
3921 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3923 /* operands of same sign, result different sign */
3924 generate_exception(ctx
, EXCP_OVERFLOW
);
3926 gen_store_gpr(t0
, rt
);
3932 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3934 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3941 /* Logic with immediate operand */
3942 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3943 int rt
, int rs
, int16_t imm
)
3948 /* If no destination, treat it as a NOP. */
3951 uimm
= (uint16_t)imm
;
3954 if (likely(rs
!= 0)) {
3955 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3957 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3962 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3964 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3968 if (likely(rs
!= 0)) {
3969 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3971 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3975 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
3977 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3978 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3980 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3989 /* Set on less than with immediate operand */
3990 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3991 int rt
, int rs
, int16_t imm
)
3993 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3997 /* If no destination, treat it as a NOP. */
4000 t0
= tcg_temp_new();
4001 gen_load_gpr(t0
, rs
);
4004 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
4007 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
4013 /* Shifts with immediate operand */
4014 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
4015 int rt
, int rs
, int16_t imm
)
4017 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
4021 /* If no destination, treat it as a NOP. */
4025 t0
= tcg_temp_new();
4026 gen_load_gpr(t0
, rs
);
4029 tcg_gen_shli_tl(t0
, t0
, uimm
);
4030 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4033 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4037 tcg_gen_ext32u_tl(t0
, t0
);
4038 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4040 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4045 TCGv_i32 t1
= tcg_temp_new_i32();
4047 tcg_gen_trunc_tl_i32(t1
, t0
);
4048 tcg_gen_rotri_i32(t1
, t1
, uimm
);
4049 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
4050 tcg_temp_free_i32(t1
);
4052 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
4055 #if defined(TARGET_MIPS64)
4057 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
4060 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
4063 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
4067 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
4069 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
4073 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4076 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4079 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4082 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
4090 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
4091 int rd
, int rs
, int rt
)
4093 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
4094 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
4096 * If no destination, treat it as a NOP.
4097 * For add & sub, we must generate the overflow exception when needed.
4105 TCGv t0
= tcg_temp_local_new();
4106 TCGv t1
= tcg_temp_new();
4107 TCGv t2
= tcg_temp_new();
4108 TCGLabel
*l1
= gen_new_label();
4110 gen_load_gpr(t1
, rs
);
4111 gen_load_gpr(t2
, rt
);
4112 tcg_gen_add_tl(t0
, t1
, t2
);
4113 tcg_gen_ext32s_tl(t0
, t0
);
4114 tcg_gen_xor_tl(t1
, t1
, t2
);
4115 tcg_gen_xor_tl(t2
, t0
, t2
);
4116 tcg_gen_andc_tl(t1
, t2
, t1
);
4118 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4120 /* operands of same sign, result different sign */
4121 generate_exception(ctx
, EXCP_OVERFLOW
);
4123 gen_store_gpr(t0
, rd
);
4128 if (rs
!= 0 && rt
!= 0) {
4129 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4130 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4131 } else if (rs
== 0 && rt
!= 0) {
4132 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4133 } else if (rs
!= 0 && rt
== 0) {
4134 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4136 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4141 TCGv t0
= tcg_temp_local_new();
4142 TCGv t1
= tcg_temp_new();
4143 TCGv t2
= tcg_temp_new();
4144 TCGLabel
*l1
= gen_new_label();
4146 gen_load_gpr(t1
, rs
);
4147 gen_load_gpr(t2
, rt
);
4148 tcg_gen_sub_tl(t0
, t1
, t2
);
4149 tcg_gen_ext32s_tl(t0
, t0
);
4150 tcg_gen_xor_tl(t2
, t1
, t2
);
4151 tcg_gen_xor_tl(t1
, t0
, t1
);
4152 tcg_gen_and_tl(t1
, t1
, t2
);
4154 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4157 * operands of different sign, first operand and the result
4160 generate_exception(ctx
, EXCP_OVERFLOW
);
4162 gen_store_gpr(t0
, rd
);
4167 if (rs
!= 0 && rt
!= 0) {
4168 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4169 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4170 } else if (rs
== 0 && rt
!= 0) {
4171 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4172 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4173 } else if (rs
!= 0 && rt
== 0) {
4174 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4176 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4179 #if defined(TARGET_MIPS64)
4182 TCGv t0
= tcg_temp_local_new();
4183 TCGv t1
= tcg_temp_new();
4184 TCGv t2
= tcg_temp_new();
4185 TCGLabel
*l1
= gen_new_label();
4187 gen_load_gpr(t1
, rs
);
4188 gen_load_gpr(t2
, rt
);
4189 tcg_gen_add_tl(t0
, t1
, t2
);
4190 tcg_gen_xor_tl(t1
, t1
, t2
);
4191 tcg_gen_xor_tl(t2
, t0
, t2
);
4192 tcg_gen_andc_tl(t1
, t2
, t1
);
4194 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4196 /* operands of same sign, result different sign */
4197 generate_exception(ctx
, EXCP_OVERFLOW
);
4199 gen_store_gpr(t0
, rd
);
4204 if (rs
!= 0 && rt
!= 0) {
4205 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4206 } else if (rs
== 0 && rt
!= 0) {
4207 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4208 } else if (rs
!= 0 && rt
== 0) {
4209 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4211 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4216 TCGv t0
= tcg_temp_local_new();
4217 TCGv t1
= tcg_temp_new();
4218 TCGv t2
= tcg_temp_new();
4219 TCGLabel
*l1
= gen_new_label();
4221 gen_load_gpr(t1
, rs
);
4222 gen_load_gpr(t2
, rt
);
4223 tcg_gen_sub_tl(t0
, t1
, t2
);
4224 tcg_gen_xor_tl(t2
, t1
, t2
);
4225 tcg_gen_xor_tl(t1
, t0
, t1
);
4226 tcg_gen_and_tl(t1
, t1
, t2
);
4228 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
4230 /* operands of different sign, first operand and result different sign */
4231 generate_exception(ctx
, EXCP_OVERFLOW
);
4233 gen_store_gpr(t0
, rd
);
4238 if (rs
!= 0 && rt
!= 0) {
4239 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4240 } else if (rs
== 0 && rt
!= 0) {
4241 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4242 } else if (rs
!= 0 && rt
== 0) {
4243 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4245 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4250 if (likely(rs
!= 0 && rt
!= 0)) {
4251 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4252 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4254 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4260 /* Conditional move */
4261 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
4262 int rd
, int rs
, int rt
)
4267 /* If no destination, treat it as a NOP. */
4271 t0
= tcg_temp_new();
4272 gen_load_gpr(t0
, rt
);
4273 t1
= tcg_const_tl(0);
4274 t2
= tcg_temp_new();
4275 gen_load_gpr(t2
, rs
);
4278 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4281 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
4284 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4287 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
4296 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
4297 int rd
, int rs
, int rt
)
4300 /* If no destination, treat it as a NOP. */
4306 if (likely(rs
!= 0 && rt
!= 0)) {
4307 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4309 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4313 if (rs
!= 0 && rt
!= 0) {
4314 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4315 } else if (rs
== 0 && rt
!= 0) {
4316 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4317 } else if (rs
!= 0 && rt
== 0) {
4318 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4320 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
4324 if (likely(rs
!= 0 && rt
!= 0)) {
4325 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4326 } else if (rs
== 0 && rt
!= 0) {
4327 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4328 } else if (rs
!= 0 && rt
== 0) {
4329 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4331 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4335 if (likely(rs
!= 0 && rt
!= 0)) {
4336 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
4337 } else if (rs
== 0 && rt
!= 0) {
4338 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
4339 } else if (rs
!= 0 && rt
== 0) {
4340 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
4342 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4348 /* Set on lower than */
4349 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
4350 int rd
, int rs
, int rt
)
4355 /* If no destination, treat it as a NOP. */
4359 t0
= tcg_temp_new();
4360 t1
= tcg_temp_new();
4361 gen_load_gpr(t0
, rs
);
4362 gen_load_gpr(t1
, rt
);
4365 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
4368 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
4376 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
4377 int rd
, int rs
, int rt
)
4383 * If no destination, treat it as a NOP.
4384 * For add & sub, we must generate the overflow exception when needed.
4389 t0
= tcg_temp_new();
4390 t1
= tcg_temp_new();
4391 gen_load_gpr(t0
, rs
);
4392 gen_load_gpr(t1
, rt
);
4395 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4396 tcg_gen_shl_tl(t0
, t1
, t0
);
4397 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4400 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4401 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4404 tcg_gen_ext32u_tl(t1
, t1
);
4405 tcg_gen_andi_tl(t0
, t0
, 0x1f);
4406 tcg_gen_shr_tl(t0
, t1
, t0
);
4407 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4411 TCGv_i32 t2
= tcg_temp_new_i32();
4412 TCGv_i32 t3
= tcg_temp_new_i32();
4414 tcg_gen_trunc_tl_i32(t2
, t0
);
4415 tcg_gen_trunc_tl_i32(t3
, t1
);
4416 tcg_gen_andi_i32(t2
, t2
, 0x1f);
4417 tcg_gen_rotr_i32(t2
, t3
, t2
);
4418 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4419 tcg_temp_free_i32(t2
);
4420 tcg_temp_free_i32(t3
);
4423 #if defined(TARGET_MIPS64)
4425 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4426 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
4429 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4430 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
4433 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4434 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
4437 tcg_gen_andi_tl(t0
, t0
, 0x3f);
4438 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
#if defined(TARGET_MIPS64)
/* Copy GPR to and from TX79 HI1/LO1 register. */
static void gen_HILO1_tx79(DisasContext *ctx, uint32_t opc, int reg)
{
    if (reg == 0 && (opc == MMI_OPC_MFHI1 || opc == MMI_OPC_MFLO1)) {
        /* Treat as NOP. */
        return;
    }

    switch (opc) {
    case MMI_OPC_MFHI1:
        tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[1]);
        break;
    case MMI_OPC_MFLO1:
        tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[1]);
        break;
    case MMI_OPC_MTHI1:
        if (reg != 0) {
            tcg_gen_mov_tl(cpu_HI[1], cpu_gpr[reg]);
        } else {
            /* Moving from $zero stores constant 0. */
            tcg_gen_movi_tl(cpu_HI[1], 0);
        }
        break;
    case MMI_OPC_MTLO1:
        if (reg != 0) {
            tcg_gen_mov_tl(cpu_LO[1], cpu_gpr[reg]);
        } else {
            tcg_gen_movi_tl(cpu_LO[1], 0);
        }
        break;
    default:
        MIPS_INVAL("mfthilo1 TX79");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }
}
#endif
4484 /* Arithmetic on HI/LO registers */
4485 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
4487 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
4498 #if defined(TARGET_MIPS64)
4500 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4504 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
4508 #if defined(TARGET_MIPS64)
4510 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4514 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
4519 #if defined(TARGET_MIPS64)
4521 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4525 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
4528 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
4533 #if defined(TARGET_MIPS64)
4535 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4539 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
4542 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
4548 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
4551 TCGv t0
= tcg_const_tl(addr
);
4552 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
4553 gen_store_gpr(t0
, reg
);
4557 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4563 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4566 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4567 addr
= addr_add(ctx
, pc
, offset
);
4568 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4572 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4573 addr
= addr_add(ctx
, pc
, offset
);
4574 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4576 #if defined(TARGET_MIPS64)
4579 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4580 addr
= addr_add(ctx
, pc
, offset
);
4581 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4585 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4588 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4589 addr
= addr_add(ctx
, pc
, offset
);
4590 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4595 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4596 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4597 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4600 #if defined(TARGET_MIPS64)
4601 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4602 case R6_OPC_LDPC
+ (1 << 16):
4603 case R6_OPC_LDPC
+ (2 << 16):
4604 case R6_OPC_LDPC
+ (3 << 16):
4606 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4607 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4608 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4612 MIPS_INVAL("OPC_PCREL");
4613 generate_exception_end(ctx
, EXCP_RI
);
4620 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4629 t0
= tcg_temp_new();
4630 t1
= tcg_temp_new();
4632 gen_load_gpr(t0
, rs
);
4633 gen_load_gpr(t1
, rt
);
4638 TCGv t2
= tcg_temp_new();
4639 TCGv t3
= tcg_temp_new();
4640 tcg_gen_ext32s_tl(t0
, t0
);
4641 tcg_gen_ext32s_tl(t1
, t1
);
4642 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4643 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4644 tcg_gen_and_tl(t2
, t2
, t3
);
4645 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4646 tcg_gen_or_tl(t2
, t2
, t3
);
4647 tcg_gen_movi_tl(t3
, 0);
4648 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4649 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4650 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4657 TCGv t2
= tcg_temp_new();
4658 TCGv t3
= tcg_temp_new();
4659 tcg_gen_ext32s_tl(t0
, t0
);
4660 tcg_gen_ext32s_tl(t1
, t1
);
4661 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4662 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4663 tcg_gen_and_tl(t2
, t2
, t3
);
4664 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4665 tcg_gen_or_tl(t2
, t2
, t3
);
4666 tcg_gen_movi_tl(t3
, 0);
4667 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4668 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4669 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4676 TCGv t2
= tcg_const_tl(0);
4677 TCGv t3
= tcg_const_tl(1);
4678 tcg_gen_ext32u_tl(t0
, t0
);
4679 tcg_gen_ext32u_tl(t1
, t1
);
4680 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4681 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4682 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4689 TCGv t2
= tcg_const_tl(0);
4690 TCGv t3
= tcg_const_tl(1);
4691 tcg_gen_ext32u_tl(t0
, t0
);
4692 tcg_gen_ext32u_tl(t1
, t1
);
4693 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4694 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4695 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4702 TCGv_i32 t2
= tcg_temp_new_i32();
4703 TCGv_i32 t3
= tcg_temp_new_i32();
4704 tcg_gen_trunc_tl_i32(t2
, t0
);
4705 tcg_gen_trunc_tl_i32(t3
, t1
);
4706 tcg_gen_mul_i32(t2
, t2
, t3
);
4707 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4708 tcg_temp_free_i32(t2
);
4709 tcg_temp_free_i32(t3
);
4714 TCGv_i32 t2
= tcg_temp_new_i32();
4715 TCGv_i32 t3
= tcg_temp_new_i32();
4716 tcg_gen_trunc_tl_i32(t2
, t0
);
4717 tcg_gen_trunc_tl_i32(t3
, t1
);
4718 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4719 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4720 tcg_temp_free_i32(t2
);
4721 tcg_temp_free_i32(t3
);
4726 TCGv_i32 t2
= tcg_temp_new_i32();
4727 TCGv_i32 t3
= tcg_temp_new_i32();
4728 tcg_gen_trunc_tl_i32(t2
, t0
);
4729 tcg_gen_trunc_tl_i32(t3
, t1
);
4730 tcg_gen_mul_i32(t2
, t2
, t3
);
4731 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4732 tcg_temp_free_i32(t2
);
4733 tcg_temp_free_i32(t3
);
4738 TCGv_i32 t2
= tcg_temp_new_i32();
4739 TCGv_i32 t3
= tcg_temp_new_i32();
4740 tcg_gen_trunc_tl_i32(t2
, t0
);
4741 tcg_gen_trunc_tl_i32(t3
, t1
);
4742 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4743 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4744 tcg_temp_free_i32(t2
);
4745 tcg_temp_free_i32(t3
);
4748 #if defined(TARGET_MIPS64)
4751 TCGv t2
= tcg_temp_new();
4752 TCGv t3
= tcg_temp_new();
4753 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4754 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4755 tcg_gen_and_tl(t2
, t2
, t3
);
4756 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4757 tcg_gen_or_tl(t2
, t2
, t3
);
4758 tcg_gen_movi_tl(t3
, 0);
4759 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4760 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4767 TCGv t2
= tcg_temp_new();
4768 TCGv t3
= tcg_temp_new();
4769 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4770 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4771 tcg_gen_and_tl(t2
, t2
, t3
);
4772 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4773 tcg_gen_or_tl(t2
, t2
, t3
);
4774 tcg_gen_movi_tl(t3
, 0);
4775 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4776 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4783 TCGv t2
= tcg_const_tl(0);
4784 TCGv t3
= tcg_const_tl(1);
4785 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4786 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4793 TCGv t2
= tcg_const_tl(0);
4794 TCGv t3
= tcg_const_tl(1);
4795 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4796 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4802 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4806 TCGv t2
= tcg_temp_new();
4807 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4812 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4816 TCGv t2
= tcg_temp_new();
4817 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4823 MIPS_INVAL("r6 mul/div");
4824 generate_exception_end(ctx
, EXCP_RI
);
4832 #if defined(TARGET_MIPS64)
4833 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
4837 t0
= tcg_temp_new();
4838 t1
= tcg_temp_new();
4840 gen_load_gpr(t0
, rs
);
4841 gen_load_gpr(t1
, rt
);
4846 TCGv t2
= tcg_temp_new();
4847 TCGv t3
= tcg_temp_new();
4848 tcg_gen_ext32s_tl(t0
, t0
);
4849 tcg_gen_ext32s_tl(t1
, t1
);
4850 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4851 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4852 tcg_gen_and_tl(t2
, t2
, t3
);
4853 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4854 tcg_gen_or_tl(t2
, t2
, t3
);
4855 tcg_gen_movi_tl(t3
, 0);
4856 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4857 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
4858 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
4859 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4860 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4867 TCGv t2
= tcg_const_tl(0);
4868 TCGv t3
= tcg_const_tl(1);
4869 tcg_gen_ext32u_tl(t0
, t0
);
4870 tcg_gen_ext32u_tl(t1
, t1
);
4871 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4872 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
4873 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
4874 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
4875 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
4881 MIPS_INVAL("div1 TX79");
4882 generate_exception_end(ctx
, EXCP_RI
);
4891 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4892 int acc
, int rs
, int rt
)
4896 t0
= tcg_temp_new();
4897 t1
= tcg_temp_new();
4899 gen_load_gpr(t0
, rs
);
4900 gen_load_gpr(t1
, rt
);
4909 TCGv t2
= tcg_temp_new();
4910 TCGv t3
= tcg_temp_new();
4911 tcg_gen_ext32s_tl(t0
, t0
);
4912 tcg_gen_ext32s_tl(t1
, t1
);
4913 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4914 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4915 tcg_gen_and_tl(t2
, t2
, t3
);
4916 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4917 tcg_gen_or_tl(t2
, t2
, t3
);
4918 tcg_gen_movi_tl(t3
, 0);
4919 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4920 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4921 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4922 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4923 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4930 TCGv t2
= tcg_const_tl(0);
4931 TCGv t3
= tcg_const_tl(1);
4932 tcg_gen_ext32u_tl(t0
, t0
);
4933 tcg_gen_ext32u_tl(t1
, t1
);
4934 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4935 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4936 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4937 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4938 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4945 TCGv_i32 t2
= tcg_temp_new_i32();
4946 TCGv_i32 t3
= tcg_temp_new_i32();
4947 tcg_gen_trunc_tl_i32(t2
, t0
);
4948 tcg_gen_trunc_tl_i32(t3
, t1
);
4949 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4950 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4951 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4952 tcg_temp_free_i32(t2
);
4953 tcg_temp_free_i32(t3
);
4958 TCGv_i32 t2
= tcg_temp_new_i32();
4959 TCGv_i32 t3
= tcg_temp_new_i32();
4960 tcg_gen_trunc_tl_i32(t2
, t0
);
4961 tcg_gen_trunc_tl_i32(t3
, t1
);
4962 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4963 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4964 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4965 tcg_temp_free_i32(t2
);
4966 tcg_temp_free_i32(t3
);
4969 #if defined(TARGET_MIPS64)
4972 TCGv t2
= tcg_temp_new();
4973 TCGv t3
= tcg_temp_new();
4974 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4975 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4976 tcg_gen_and_tl(t2
, t2
, t3
);
4977 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4978 tcg_gen_or_tl(t2
, t2
, t3
);
4979 tcg_gen_movi_tl(t3
, 0);
4980 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4981 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4982 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4989 TCGv t2
= tcg_const_tl(0);
4990 TCGv t3
= tcg_const_tl(1);
4991 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4992 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4993 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4999 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
5002 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
5007 TCGv_i64 t2
= tcg_temp_new_i64();
5008 TCGv_i64 t3
= tcg_temp_new_i64();
5010 tcg_gen_ext_tl_i64(t2
, t0
);
5011 tcg_gen_ext_tl_i64(t3
, t1
);
5012 tcg_gen_mul_i64(t2
, t2
, t3
);
5013 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5014 tcg_gen_add_i64(t2
, t2
, t3
);
5015 tcg_temp_free_i64(t3
);
5016 gen_move_low32(cpu_LO
[acc
], t2
);
5017 gen_move_high32(cpu_HI
[acc
], t2
);
5018 tcg_temp_free_i64(t2
);
5023 TCGv_i64 t2
= tcg_temp_new_i64();
5024 TCGv_i64 t3
= tcg_temp_new_i64();
5026 tcg_gen_ext32u_tl(t0
, t0
);
5027 tcg_gen_ext32u_tl(t1
, t1
);
5028 tcg_gen_extu_tl_i64(t2
, t0
);
5029 tcg_gen_extu_tl_i64(t3
, t1
);
5030 tcg_gen_mul_i64(t2
, t2
, t3
);
5031 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5032 tcg_gen_add_i64(t2
, t2
, t3
);
5033 tcg_temp_free_i64(t3
);
5034 gen_move_low32(cpu_LO
[acc
], t2
);
5035 gen_move_high32(cpu_HI
[acc
], t2
);
5036 tcg_temp_free_i64(t2
);
5041 TCGv_i64 t2
= tcg_temp_new_i64();
5042 TCGv_i64 t3
= tcg_temp_new_i64();
5044 tcg_gen_ext_tl_i64(t2
, t0
);
5045 tcg_gen_ext_tl_i64(t3
, t1
);
5046 tcg_gen_mul_i64(t2
, t2
, t3
);
5047 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5048 tcg_gen_sub_i64(t2
, t3
, t2
);
5049 tcg_temp_free_i64(t3
);
5050 gen_move_low32(cpu_LO
[acc
], t2
);
5051 gen_move_high32(cpu_HI
[acc
], t2
);
5052 tcg_temp_free_i64(t2
);
5057 TCGv_i64 t2
= tcg_temp_new_i64();
5058 TCGv_i64 t3
= tcg_temp_new_i64();
5060 tcg_gen_ext32u_tl(t0
, t0
);
5061 tcg_gen_ext32u_tl(t1
, t1
);
5062 tcg_gen_extu_tl_i64(t2
, t0
);
5063 tcg_gen_extu_tl_i64(t3
, t1
);
5064 tcg_gen_mul_i64(t2
, t2
, t3
);
5065 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5066 tcg_gen_sub_i64(t2
, t3
, t2
);
5067 tcg_temp_free_i64(t3
);
5068 gen_move_low32(cpu_LO
[acc
], t2
);
5069 gen_move_high32(cpu_HI
[acc
], t2
);
5070 tcg_temp_free_i64(t2
);
5074 MIPS_INVAL("mul/div");
5075 generate_exception_end(ctx
, EXCP_RI
);
5084 * These MULT[U] and MADD[U] instructions implemented in for example
5085 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
5086 * architectures are special three-operand variants with the syntax
5088 * MULT[U][1] rd, rs, rt
5092 * (rd, LO, HI) <- rs * rt
5096 * MADD[U][1] rd, rs, rt
5100 * (rd, LO, HI) <- (LO, HI) + rs * rt
5102 * where the low-order 32-bits of the result is placed into both the
5103 * GPR rd and the special register LO. The high-order 32-bits of the
5104 * result is placed into the special register HI.
5106 * If the GPR rd is omitted in assembly language, it is taken to be 0,
5107 * which is the zero register that always reads as 0.
5109 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
5110 int rd
, int rs
, int rt
)
5112 TCGv t0
= tcg_temp_new();
5113 TCGv t1
= tcg_temp_new();
5116 gen_load_gpr(t0
, rs
);
5117 gen_load_gpr(t1
, rt
);
5125 TCGv_i32 t2
= tcg_temp_new_i32();
5126 TCGv_i32 t3
= tcg_temp_new_i32();
5127 tcg_gen_trunc_tl_i32(t2
, t0
);
5128 tcg_gen_trunc_tl_i32(t3
, t1
);
5129 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
5131 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5133 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5134 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5135 tcg_temp_free_i32(t2
);
5136 tcg_temp_free_i32(t3
);
5139 case MMI_OPC_MULTU1
:
5144 TCGv_i32 t2
= tcg_temp_new_i32();
5145 TCGv_i32 t3
= tcg_temp_new_i32();
5146 tcg_gen_trunc_tl_i32(t2
, t0
);
5147 tcg_gen_trunc_tl_i32(t3
, t1
);
5148 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
5150 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
5152 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
5153 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
5154 tcg_temp_free_i32(t2
);
5155 tcg_temp_free_i32(t3
);
5163 TCGv_i64 t2
= tcg_temp_new_i64();
5164 TCGv_i64 t3
= tcg_temp_new_i64();
5166 tcg_gen_ext_tl_i64(t2
, t0
);
5167 tcg_gen_ext_tl_i64(t3
, t1
);
5168 tcg_gen_mul_i64(t2
, t2
, t3
);
5169 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5170 tcg_gen_add_i64(t2
, t2
, t3
);
5171 tcg_temp_free_i64(t3
);
5172 gen_move_low32(cpu_LO
[acc
], t2
);
5173 gen_move_high32(cpu_HI
[acc
], t2
);
5175 gen_move_low32(cpu_gpr
[rd
], t2
);
5177 tcg_temp_free_i64(t2
);
5180 case MMI_OPC_MADDU1
:
5185 TCGv_i64 t2
= tcg_temp_new_i64();
5186 TCGv_i64 t3
= tcg_temp_new_i64();
5188 tcg_gen_ext32u_tl(t0
, t0
);
5189 tcg_gen_ext32u_tl(t1
, t1
);
5190 tcg_gen_extu_tl_i64(t2
, t0
);
5191 tcg_gen_extu_tl_i64(t3
, t1
);
5192 tcg_gen_mul_i64(t2
, t2
, t3
);
5193 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
5194 tcg_gen_add_i64(t2
, t2
, t3
);
5195 tcg_temp_free_i64(t3
);
5196 gen_move_low32(cpu_LO
[acc
], t2
);
5197 gen_move_high32(cpu_HI
[acc
], t2
);
5199 gen_move_low32(cpu_gpr
[rd
], t2
);
5201 tcg_temp_free_i64(t2
);
5205 MIPS_INVAL("mul/madd TXx9");
5206 generate_exception_end(ctx
, EXCP_RI
);
5215 static void gen_mul_vr54xx(DisasContext
*ctx
, uint32_t opc
,
5216 int rd
, int rs
, int rt
)
5218 TCGv t0
= tcg_temp_new();
5219 TCGv t1
= tcg_temp_new();
5221 gen_load_gpr(t0
, rs
);
5222 gen_load_gpr(t1
, rt
);
5225 case OPC_VR54XX_MULS
:
5226 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
5228 case OPC_VR54XX_MULSU
:
5229 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
5231 case OPC_VR54XX_MACC
:
5232 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
5234 case OPC_VR54XX_MACCU
:
5235 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
5237 case OPC_VR54XX_MSAC
:
5238 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
5240 case OPC_VR54XX_MSACU
:
5241 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
5243 case OPC_VR54XX_MULHI
:
5244 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
5246 case OPC_VR54XX_MULHIU
:
5247 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
5249 case OPC_VR54XX_MULSHI
:
5250 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
5252 case OPC_VR54XX_MULSHIU
:
5253 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
5255 case OPC_VR54XX_MACCHI
:
5256 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
5258 case OPC_VR54XX_MACCHIU
:
5259 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
5261 case OPC_VR54XX_MSACHI
:
5262 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
5264 case OPC_VR54XX_MSACHIU
:
5265 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
5268 MIPS_INVAL("mul vr54xx");
5269 generate_exception_end(ctx
, EXCP_RI
);
5272 gen_store_gpr(t0
, rd
);
5279 static void gen_cl(DisasContext
*ctx
, uint32_t opc
,
5289 gen_load_gpr(t0
, rs
);
5294 #if defined(TARGET_MIPS64)
5298 tcg_gen_not_tl(t0
, t0
);
5307 tcg_gen_ext32u_tl(t0
, t0
);
5308 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
5309 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
5311 #if defined(TARGET_MIPS64)
5316 tcg_gen_clzi_i64(t0
, t0
, 64);
5322 /* Godson integer instructions */
5323 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
5324 int rd
, int rs
, int rt
)
5336 case OPC_MULTU_G_2E
:
5337 case OPC_MULTU_G_2F
:
5338 #if defined(TARGET_MIPS64)
5339 case OPC_DMULT_G_2E
:
5340 case OPC_DMULT_G_2F
:
5341 case OPC_DMULTU_G_2E
:
5342 case OPC_DMULTU_G_2F
:
5344 t0
= tcg_temp_new();
5345 t1
= tcg_temp_new();
5348 t0
= tcg_temp_local_new();
5349 t1
= tcg_temp_local_new();
5353 gen_load_gpr(t0
, rs
);
5354 gen_load_gpr(t1
, rt
);
5359 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5360 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5362 case OPC_MULTU_G_2E
:
5363 case OPC_MULTU_G_2F
:
5364 tcg_gen_ext32u_tl(t0
, t0
);
5365 tcg_gen_ext32u_tl(t1
, t1
);
5366 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5367 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5372 TCGLabel
*l1
= gen_new_label();
5373 TCGLabel
*l2
= gen_new_label();
5374 TCGLabel
*l3
= gen_new_label();
5375 tcg_gen_ext32s_tl(t0
, t0
);
5376 tcg_gen_ext32s_tl(t1
, t1
);
5377 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5378 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5381 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5382 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5383 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5386 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5387 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5394 TCGLabel
*l1
= gen_new_label();
5395 TCGLabel
*l2
= gen_new_label();
5396 tcg_gen_ext32u_tl(t0
, t0
);
5397 tcg_gen_ext32u_tl(t1
, t1
);
5398 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5399 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5402 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5403 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5410 TCGLabel
*l1
= gen_new_label();
5411 TCGLabel
*l2
= gen_new_label();
5412 TCGLabel
*l3
= gen_new_label();
5413 tcg_gen_ext32u_tl(t0
, t0
);
5414 tcg_gen_ext32u_tl(t1
, t1
);
5415 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5416 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
5417 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
5419 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5422 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5423 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5430 TCGLabel
*l1
= gen_new_label();
5431 TCGLabel
*l2
= gen_new_label();
5432 tcg_gen_ext32u_tl(t0
, t0
);
5433 tcg_gen_ext32u_tl(t1
, t1
);
5434 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5435 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5438 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5439 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
5443 #if defined(TARGET_MIPS64)
5444 case OPC_DMULT_G_2E
:
5445 case OPC_DMULT_G_2F
:
5446 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5448 case OPC_DMULTU_G_2E
:
5449 case OPC_DMULTU_G_2F
:
5450 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
5455 TCGLabel
*l1
= gen_new_label();
5456 TCGLabel
*l2
= gen_new_label();
5457 TCGLabel
*l3
= gen_new_label();
5458 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5459 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5462 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5463 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5464 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5467 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
5471 case OPC_DDIVU_G_2E
:
5472 case OPC_DDIVU_G_2F
:
5474 TCGLabel
*l1
= gen_new_label();
5475 TCGLabel
*l2
= gen_new_label();
5476 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5477 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5480 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
5487 TCGLabel
*l1
= gen_new_label();
5488 TCGLabel
*l2
= gen_new_label();
5489 TCGLabel
*l3
= gen_new_label();
5490 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5491 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
5492 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
5494 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5497 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
5501 case OPC_DMODU_G_2E
:
5502 case OPC_DMODU_G_2F
:
5504 TCGLabel
*l1
= gen_new_label();
5505 TCGLabel
*l2
= gen_new_label();
5506 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
5507 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
5510 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
5521 /* Loongson multimedia instructions */
5522 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
5524 uint32_t opc
, shift_max
;
5527 opc
= MASK_LMI(ctx
->opcode
);
5533 t0
= tcg_temp_local_new_i64();
5534 t1
= tcg_temp_local_new_i64();
5537 t0
= tcg_temp_new_i64();
5538 t1
= tcg_temp_new_i64();
5542 check_cp1_enabled(ctx
);
5543 gen_load_fpr64(ctx
, t0
, rs
);
5544 gen_load_fpr64(ctx
, t1
, rt
);
5546 #define LMI_HELPER(UP, LO) \
5547 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
5548 #define LMI_HELPER_1(UP, LO) \
5549 case OPC_##UP: gen_helper_##LO(t0, t0); break
5550 #define LMI_DIRECT(UP, LO, OP) \
5551 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
5554 LMI_HELPER(PADDSH
, paddsh
);
5555 LMI_HELPER(PADDUSH
, paddush
);
5556 LMI_HELPER(PADDH
, paddh
);
5557 LMI_HELPER(PADDW
, paddw
);
5558 LMI_HELPER(PADDSB
, paddsb
);
5559 LMI_HELPER(PADDUSB
, paddusb
);
5560 LMI_HELPER(PADDB
, paddb
);
5562 LMI_HELPER(PSUBSH
, psubsh
);
5563 LMI_HELPER(PSUBUSH
, psubush
);
5564 LMI_HELPER(PSUBH
, psubh
);
5565 LMI_HELPER(PSUBW
, psubw
);
5566 LMI_HELPER(PSUBSB
, psubsb
);
5567 LMI_HELPER(PSUBUSB
, psubusb
);
5568 LMI_HELPER(PSUBB
, psubb
);
5570 LMI_HELPER(PSHUFH
, pshufh
);
5571 LMI_HELPER(PACKSSWH
, packsswh
);
5572 LMI_HELPER(PACKSSHB
, packsshb
);
5573 LMI_HELPER(PACKUSHB
, packushb
);
5575 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
5576 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
5577 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
5578 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
5579 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
5580 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
5582 LMI_HELPER(PAVGH
, pavgh
);
5583 LMI_HELPER(PAVGB
, pavgb
);
5584 LMI_HELPER(PMAXSH
, pmaxsh
);
5585 LMI_HELPER(PMINSH
, pminsh
);
5586 LMI_HELPER(PMAXUB
, pmaxub
);
5587 LMI_HELPER(PMINUB
, pminub
);
5589 LMI_HELPER(PCMPEQW
, pcmpeqw
);
5590 LMI_HELPER(PCMPGTW
, pcmpgtw
);
5591 LMI_HELPER(PCMPEQH
, pcmpeqh
);
5592 LMI_HELPER(PCMPGTH
, pcmpgth
);
5593 LMI_HELPER(PCMPEQB
, pcmpeqb
);
5594 LMI_HELPER(PCMPGTB
, pcmpgtb
);
5596 LMI_HELPER(PSLLW
, psllw
);
5597 LMI_HELPER(PSLLH
, psllh
);
5598 LMI_HELPER(PSRLW
, psrlw
);
5599 LMI_HELPER(PSRLH
, psrlh
);
5600 LMI_HELPER(PSRAW
, psraw
);
5601 LMI_HELPER(PSRAH
, psrah
);
5603 LMI_HELPER(PMULLH
, pmullh
);
5604 LMI_HELPER(PMULHH
, pmulhh
);
5605 LMI_HELPER(PMULHUH
, pmulhuh
);
5606 LMI_HELPER(PMADDHW
, pmaddhw
);
5608 LMI_HELPER(PASUBUB
, pasubub
);
5609 LMI_HELPER_1(BIADD
, biadd
);
5610 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
5612 LMI_DIRECT(PADDD
, paddd
, add
);
5613 LMI_DIRECT(PSUBD
, psubd
, sub
);
5614 LMI_DIRECT(XOR_CP2
, xor, xor);
5615 LMI_DIRECT(NOR_CP2
, nor
, nor
);
5616 LMI_DIRECT(AND_CP2
, and, and);
5617 LMI_DIRECT(OR_CP2
, or, or);
5620 tcg_gen_andc_i64(t0
, t1
, t0
);
5624 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5627 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5630 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5633 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5637 tcg_gen_andi_i64(t1
, t1
, 3);
5638 tcg_gen_shli_i64(t1
, t1
, 4);
5639 tcg_gen_shr_i64(t0
, t0
, t1
);
5640 tcg_gen_ext16u_i64(t0
, t0
);
5644 tcg_gen_add_i64(t0
, t0
, t1
);
5645 tcg_gen_ext32s_i64(t0
, t0
);
5648 tcg_gen_sub_i64(t0
, t0
, t1
);
5649 tcg_gen_ext32s_i64(t0
, t0
);
5671 /* Make sure shift count isn't TCG undefined behaviour. */
5672 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5677 tcg_gen_shl_i64(t0
, t0
, t1
);
5682 * Since SRA is UndefinedResult without sign-extended inputs,
5683 * we can treat SRA and DSRA the same.
5685 tcg_gen_sar_i64(t0
, t0
, t1
);
5688 /* We want to shift in zeros for SRL; zero-extend first. */
5689 tcg_gen_ext32u_i64(t0
, t0
);
5692 tcg_gen_shr_i64(t0
, t0
, t1
);
5696 if (shift_max
== 32) {
5697 tcg_gen_ext32s_i64(t0
, t0
);
5700 /* Shifts larger than MAX produce zero. */
5701 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5702 tcg_gen_neg_i64(t1
, t1
);
5703 tcg_gen_and_i64(t0
, t0
, t1
);
5709 TCGv_i64 t2
= tcg_temp_new_i64();
5710 TCGLabel
*lab
= gen_new_label();
5712 tcg_gen_mov_i64(t2
, t0
);
5713 tcg_gen_add_i64(t0
, t1
, t2
);
5714 if (opc
== OPC_ADD_CP2
) {
5715 tcg_gen_ext32s_i64(t0
, t0
);
5717 tcg_gen_xor_i64(t1
, t1
, t2
);
5718 tcg_gen_xor_i64(t2
, t2
, t0
);
5719 tcg_gen_andc_i64(t1
, t2
, t1
);
5720 tcg_temp_free_i64(t2
);
5721 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5722 generate_exception(ctx
, EXCP_OVERFLOW
);
5730 TCGv_i64 t2
= tcg_temp_new_i64();
5731 TCGLabel
*lab
= gen_new_label();
5733 tcg_gen_mov_i64(t2
, t0
);
5734 tcg_gen_sub_i64(t0
, t1
, t2
);
5735 if (opc
== OPC_SUB_CP2
) {
5736 tcg_gen_ext32s_i64(t0
, t0
);
5738 tcg_gen_xor_i64(t1
, t1
, t2
);
5739 tcg_gen_xor_i64(t2
, t2
, t0
);
5740 tcg_gen_and_i64(t1
, t1
, t2
);
5741 tcg_temp_free_i64(t2
);
5742 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5743 generate_exception(ctx
, EXCP_OVERFLOW
);
5749 tcg_gen_ext32u_i64(t0
, t0
);
5750 tcg_gen_ext32u_i64(t1
, t1
);
5751 tcg_gen_mul_i64(t0
, t0
, t1
);
5761 * ??? Document is unclear: Set FCC[CC]. Does that mean the
5762 * FD field is the CC field?
5765 MIPS_INVAL("loongson_cp2");
5766 generate_exception_end(ctx
, EXCP_RI
);
5773 gen_store_fpr64(ctx
, t0
, rd
);
5775 tcg_temp_free_i64(t0
);
5776 tcg_temp_free_i64(t1
);
5780 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
5781 int rs
, int rt
, int16_t imm
)
5784 TCGv t0
= tcg_temp_new();
5785 TCGv t1
= tcg_temp_new();
5788 /* Load needed operands */
5796 /* Compare two registers */
5798 gen_load_gpr(t0
, rs
);
5799 gen_load_gpr(t1
, rt
);
5809 /* Compare register to immediate */
5810 if (rs
!= 0 || imm
!= 0) {
5811 gen_load_gpr(t0
, rs
);
5812 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5819 case OPC_TEQ
: /* rs == rs */
5820 case OPC_TEQI
: /* r0 == 0 */
5821 case OPC_TGE
: /* rs >= rs */
5822 case OPC_TGEI
: /* r0 >= 0 */
5823 case OPC_TGEU
: /* rs >= rs unsigned */
5824 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5826 generate_exception_end(ctx
, EXCP_TRAP
);
5828 case OPC_TLT
: /* rs < rs */
5829 case OPC_TLTI
: /* r0 < 0 */
5830 case OPC_TLTU
: /* rs < rs unsigned */
5831 case OPC_TLTIU
: /* r0 < 0 unsigned */
5832 case OPC_TNE
: /* rs != rs */
5833 case OPC_TNEI
: /* r0 != 0 */
5834 /* Never trap: treat as NOP. */
5838 TCGLabel
*l1
= gen_new_label();
5843 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5847 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5851 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5855 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5859 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5863 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5866 generate_exception(ctx
, EXCP_TRAP
);
/*
 * Decide whether a direct goto_tb chain to @dest is permitted.
 * Chaining is never used while single-stepping; in system mode it is
 * additionally restricted to targets on the same guest page as the
 * current TB (cross-page chains could bypass MMU permission changes).
 */
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->base.singlestep_enabled)) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
/*
 * Emit code to transfer control to the TB at @dest: either a directly
 * chained goto_tb using slot @n when use_goto_tb() allows it, or an
 * indirect lookup-and-goto.  When single-stepping, a debug exception
 * is raised instead so the debugger regains control after each insn.
 */
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        gen_save_pc(dest);
        if (ctx->base.singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
        } else {
            tcg_gen_lookup_and_goto_ptr();
        }
    }
}
5902 /* Branches (before delay slot) */
5903 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
5905 int rs
, int rt
, int32_t offset
,
5908 target_ulong btgt
= -1;
5910 int bcond_compute
= 0;
5911 TCGv t0
= tcg_temp_new();
5912 TCGv t1
= tcg_temp_new();
5914 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5915 #ifdef MIPS_DEBUG_DISAS
5916 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5917 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5919 generate_exception_end(ctx
, EXCP_RI
);
5923 /* Load needed operands */
5929 /* Compare two registers */
5931 gen_load_gpr(t0
, rs
);
5932 gen_load_gpr(t1
, rt
);
5935 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5949 /* Compare to zero */
5951 gen_load_gpr(t0
, rs
);
5954 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5957 #if defined(TARGET_MIPS64)
5959 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5961 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5964 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5969 /* Jump to immediate */
5970 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5975 /* Jump to register */
5976 if (offset
!= 0 && offset
!= 16) {
5978 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5979 * others are reserved.
5981 MIPS_INVAL("jump hint");
5982 generate_exception_end(ctx
, EXCP_RI
);
5985 gen_load_gpr(btarget
, rs
);
5988 MIPS_INVAL("branch/jump");
5989 generate_exception_end(ctx
, EXCP_RI
);
5992 if (bcond_compute
== 0) {
5993 /* No condition to be computed */
5995 case OPC_BEQ
: /* rx == rx */
5996 case OPC_BEQL
: /* rx == rx likely */
5997 case OPC_BGEZ
: /* 0 >= 0 */
5998 case OPC_BGEZL
: /* 0 >= 0 likely */
5999 case OPC_BLEZ
: /* 0 <= 0 */
6000 case OPC_BLEZL
: /* 0 <= 0 likely */
6002 ctx
->hflags
|= MIPS_HFLAG_B
;
6004 case OPC_BGEZAL
: /* 0 >= 0 */
6005 case OPC_BGEZALL
: /* 0 >= 0 likely */
6006 /* Always take and link */
6008 ctx
->hflags
|= MIPS_HFLAG_B
;
6010 case OPC_BNE
: /* rx != rx */
6011 case OPC_BGTZ
: /* 0 > 0 */
6012 case OPC_BLTZ
: /* 0 < 0 */
6015 case OPC_BLTZAL
: /* 0 < 0 */
6017 * Handle as an unconditional branch to get correct delay
6021 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
6022 ctx
->hflags
|= MIPS_HFLAG_B
;
6024 case OPC_BLTZALL
: /* 0 < 0 likely */
6025 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6026 /* Skip the instruction in the delay slot */
6027 ctx
->base
.pc_next
+= 4;
6029 case OPC_BNEL
: /* rx != rx likely */
6030 case OPC_BGTZL
: /* 0 > 0 likely */
6031 case OPC_BLTZL
: /* 0 < 0 likely */
6032 /* Skip the instruction in the delay slot */
6033 ctx
->base
.pc_next
+= 4;
6036 ctx
->hflags
|= MIPS_HFLAG_B
;
6039 ctx
->hflags
|= MIPS_HFLAG_BX
;
6043 ctx
->hflags
|= MIPS_HFLAG_B
;
6046 ctx
->hflags
|= MIPS_HFLAG_BR
;
6050 ctx
->hflags
|= MIPS_HFLAG_BR
;
6053 MIPS_INVAL("branch/jump");
6054 generate_exception_end(ctx
, EXCP_RI
);
6060 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6063 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6066 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6069 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6072 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6075 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6078 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6082 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6086 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6089 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6092 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6095 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6098 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6101 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6104 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6106 #if defined(TARGET_MIPS64)
6108 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6112 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6115 ctx
->hflags
|= MIPS_HFLAG_BC
;
6118 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6121 ctx
->hflags
|= MIPS_HFLAG_BL
;
6124 MIPS_INVAL("conditional branch/jump");
6125 generate_exception_end(ctx
, EXCP_RI
);
6130 ctx
->btarget
= btgt
;
6132 switch (delayslot_size
) {
6134 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6137 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
6142 int post_delay
= insn_bytes
+ delayslot_size
;
6143 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6145 tcg_gen_movi_tl(cpu_gpr
[blink
],
6146 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6150 if (insn_bytes
== 2) {
6151 ctx
->hflags
|= MIPS_HFLAG_B16
;
6158 /* nanoMIPS Branches */
6159 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
6161 int rs
, int rt
, int32_t offset
)
6163 target_ulong btgt
= -1;
6164 int bcond_compute
= 0;
6165 TCGv t0
= tcg_temp_new();
6166 TCGv t1
= tcg_temp_new();
6168 /* Load needed operands */
6172 /* Compare two registers */
6174 gen_load_gpr(t0
, rs
);
6175 gen_load_gpr(t1
, rt
);
6178 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6181 /* Compare to zero */
6183 gen_load_gpr(t0
, rs
);
6186 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6189 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
6191 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6195 /* Jump to register */
6196 if (offset
!= 0 && offset
!= 16) {
6198 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
6199 * others are reserved.
6201 MIPS_INVAL("jump hint");
6202 generate_exception_end(ctx
, EXCP_RI
);
6205 gen_load_gpr(btarget
, rs
);
6208 MIPS_INVAL("branch/jump");
6209 generate_exception_end(ctx
, EXCP_RI
);
6212 if (bcond_compute
== 0) {
6213 /* No condition to be computed */
6215 case OPC_BEQ
: /* rx == rx */
6217 ctx
->hflags
|= MIPS_HFLAG_B
;
6219 case OPC_BGEZAL
: /* 0 >= 0 */
6220 /* Always take and link */
6221 tcg_gen_movi_tl(cpu_gpr
[31],
6222 ctx
->base
.pc_next
+ insn_bytes
);
6223 ctx
->hflags
|= MIPS_HFLAG_B
;
6225 case OPC_BNE
: /* rx != rx */
6226 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6227 /* Skip the instruction in the delay slot */
6228 ctx
->base
.pc_next
+= 4;
6231 ctx
->hflags
|= MIPS_HFLAG_BR
;
6235 tcg_gen_movi_tl(cpu_gpr
[rt
],
6236 ctx
->base
.pc_next
+ insn_bytes
);
6238 ctx
->hflags
|= MIPS_HFLAG_BR
;
6241 MIPS_INVAL("branch/jump");
6242 generate_exception_end(ctx
, EXCP_RI
);
6248 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6251 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6254 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6255 tcg_gen_movi_tl(cpu_gpr
[31],
6256 ctx
->base
.pc_next
+ insn_bytes
);
6259 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6261 ctx
->hflags
|= MIPS_HFLAG_BC
;
6264 MIPS_INVAL("conditional branch/jump");
6265 generate_exception_end(ctx
, EXCP_RI
);
6270 ctx
->btarget
= btgt
;
6273 if (insn_bytes
== 2) {
6274 ctx
->hflags
|= MIPS_HFLAG_B16
;
6281 /* special3 bitfield operations */
6282 static void gen_bitops(DisasContext
*ctx
, uint32_t opc
, int rt
,
6283 int rs
, int lsb
, int msb
)
6285 TCGv t0
= tcg_temp_new();
6286 TCGv t1
= tcg_temp_new();
6288 gen_load_gpr(t1
, rs
);
6291 if (lsb
+ msb
> 31) {
6295 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6298 * The two checks together imply that lsb == 0,
6299 * so this is a simple sign-extension.
6301 tcg_gen_ext32s_tl(t0
, t1
);
6304 #if defined(TARGET_MIPS64)
6313 if (lsb
+ msb
> 63) {
6316 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6323 gen_load_gpr(t0
, rt
);
6324 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6325 tcg_gen_ext32s_tl(t0
, t0
);
6327 #if defined(TARGET_MIPS64)
6338 gen_load_gpr(t0
, rt
);
6339 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6344 MIPS_INVAL("bitops");
6345 generate_exception_end(ctx
, EXCP_RI
);
6350 gen_store_gpr(t0
, rt
);
6355 static void gen_bshfl(DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6360 /* If no destination, treat it as a NOP. */
6364 t0
= tcg_temp_new();
6365 gen_load_gpr(t0
, rt
);
6369 TCGv t1
= tcg_temp_new();
6370 TCGv t2
= tcg_const_tl(0x00FF00FF);
6372 tcg_gen_shri_tl(t1
, t0
, 8);
6373 tcg_gen_and_tl(t1
, t1
, t2
);
6374 tcg_gen_and_tl(t0
, t0
, t2
);
6375 tcg_gen_shli_tl(t0
, t0
, 8);
6376 tcg_gen_or_tl(t0
, t0
, t1
);
6379 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6383 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6386 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6388 #if defined(TARGET_MIPS64)
6391 TCGv t1
= tcg_temp_new();
6392 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6394 tcg_gen_shri_tl(t1
, t0
, 8);
6395 tcg_gen_and_tl(t1
, t1
, t2
);
6396 tcg_gen_and_tl(t0
, t0
, t2
);
6397 tcg_gen_shli_tl(t0
, t0
, 8);
6398 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6405 TCGv t1
= tcg_temp_new();
6406 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6408 tcg_gen_shri_tl(t1
, t0
, 16);
6409 tcg_gen_and_tl(t1
, t1
, t2
);
6410 tcg_gen_and_tl(t0
, t0
, t2
);
6411 tcg_gen_shli_tl(t0
, t0
, 16);
6412 tcg_gen_or_tl(t0
, t0
, t1
);
6413 tcg_gen_shri_tl(t1
, t0
, 32);
6414 tcg_gen_shli_tl(t0
, t0
, 32);
6415 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6422 MIPS_INVAL("bsfhl");
6423 generate_exception_end(ctx
, EXCP_RI
);
6430 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
6439 t0
= tcg_temp_new();
6440 t1
= tcg_temp_new();
6441 gen_load_gpr(t0
, rs
);
6442 gen_load_gpr(t1
, rt
);
6443 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
6444 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
6445 if (opc
== OPC_LSA
) {
6446 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
6455 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6463 t0
= tcg_temp_new();
6464 if (bits
== 0 || bits
== wordsz
) {
6466 gen_load_gpr(t0
, rt
);
6468 gen_load_gpr(t0
, rs
);
6472 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6474 #if defined(TARGET_MIPS64)
6476 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6481 TCGv t1
= tcg_temp_new();
6482 gen_load_gpr(t0
, rt
);
6483 gen_load_gpr(t1
, rs
);
6487 TCGv_i64 t2
= tcg_temp_new_i64();
6488 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6489 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6490 gen_move_low32(cpu_gpr
[rd
], t2
);
6491 tcg_temp_free_i64(t2
);
6494 #if defined(TARGET_MIPS64)
6496 tcg_gen_shli_tl(t0
, t0
, bits
);
6497 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6498 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
/*
 * Byte-granularity alignment wrapper: forward to gen_align_bits with
 * the boundary position @bp converted from bytes to bits.
 */
static void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                      int bp)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);
}
/*
 * Extract wrapper: forward to gen_align_bits with the complementary
 * shift (wordsz - shift), i.e. an extract starting at bit @shift.
 */
static void gen_ext(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
                    int shift)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, wordsz - shift);
}
6520 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6527 t0
= tcg_temp_new();
6528 gen_load_gpr(t0
, rt
);
6531 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6533 #if defined(TARGET_MIPS64)
6535 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6542 #ifndef CONFIG_USER_ONLY
6543 /* CP0 (MMU and control) */
/*
 * MTHC0 to EntryLo0/EntryLo1: deposit the guest value into the high
 * part of the 64-bit EntryLo register stored at env offset @off.
 */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    /* 64-bit target: the high part starts at bit 30 (32 bits wide). */
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    /* 32-bit target: the high part is the upper 32-bit half. */
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
6561 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6563 TCGv_i64 t0
= tcg_temp_new_i64();
6564 TCGv_i64 t1
= tcg_temp_new_i64();
6566 tcg_gen_ext_tl_i64(t0
, arg
);
6567 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6568 tcg_gen_concat32_i64(t1
, t1
, t0
);
6569 tcg_gen_st_i64(t1
, cpu_env
, off
);
6570 tcg_temp_free_i64(t1
);
6571 tcg_temp_free_i64(t0
);
/*
 * MFHC0 from EntryLo0/EntryLo1: read the high part of the 64-bit
 * EntryLo register at env offset @off into @arg.  Mirrors the layout
 * used by gen_mthc0_entrylo.
 */
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    /* 64-bit target: high part starts at bit 30. */
    tcg_gen_shri_i64(t0, t0, 30);
#else
    /* 32-bit target: high part is the upper 32-bit half. */
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
6588 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6590 TCGv_i64 t0
= tcg_temp_new_i64();
6592 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6593 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6594 gen_move_low32(arg
, t0
);
6595 tcg_temp_free_i64(t0
);
6598 static inline void gen_mfc0_load32(TCGv arg
, target_ulong off
)
6600 TCGv_i32 t0
= tcg_temp_new_i32();
6602 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6603 tcg_gen_ext_i32_tl(arg
, t0
);
6604 tcg_temp_free_i32(t0
);
/*
 * Load a target_ulong-sized CP0 field at env offset @off into @arg,
 * sign-extending from 32 bits (MFC0 delivers a sign-extended word).
 */
static inline void gen_mfc0_load64(TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}
6613 static inline void gen_mtc0_store32(TCGv arg
, target_ulong off
)
6615 TCGv_i32 t0
= tcg_temp_new_i32();
6617 tcg_gen_trunc_tl_i32(t0
, arg
);
6618 tcg_gen_st_i32(t0
, cpu_env
, off
);
6619 tcg_temp_free_i32(t0
);
6622 #define CP0_CHECK(c) \
6625 goto cp0_unimplemented; \
6629 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6631 const char *register_name
= "invalid";
6634 case CP0_REGISTER_02
:
6637 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6638 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6639 register_name
= "EntryLo0";
6642 goto cp0_unimplemented
;
6645 case CP0_REGISTER_03
:
6648 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6649 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6650 register_name
= "EntryLo1";
6653 goto cp0_unimplemented
;
6656 case CP0_REGISTER_09
:
6659 CP0_CHECK(ctx
->saar
);
6660 gen_helper_mfhc0_saar(arg
, cpu_env
);
6661 register_name
= "SAAR";
6664 goto cp0_unimplemented
;
6667 case CP0_REGISTER_17
:
6670 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_LLAddr
),
6671 ctx
->CP0_LLAddr_shift
);
6672 register_name
= "LLAddr";
6675 CP0_CHECK(ctx
->mrp
);
6676 gen_helper_mfhc0_maar(arg
, cpu_env
);
6677 register_name
= "MAAR";
6680 goto cp0_unimplemented
;
6683 case CP0_REGISTER_28
:
6689 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6690 register_name
= "TagLo";
6693 goto cp0_unimplemented
;
6697 goto cp0_unimplemented
;
6699 trace_mips_translate_c0("mfhc0", register_name
, reg
, sel
);
6703 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n",
6704 register_name
, reg
, sel
);
6705 tcg_gen_movi_tl(arg
, 0);
6708 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6710 const char *register_name
= "invalid";
6711 uint64_t mask
= ctx
->PAMask
>> 36;
6714 case CP0_REGISTER_02
:
6717 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6718 tcg_gen_andi_tl(arg
, arg
, mask
);
6719 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6720 register_name
= "EntryLo0";
6723 goto cp0_unimplemented
;
6726 case CP0_REGISTER_03
:
6729 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6730 tcg_gen_andi_tl(arg
, arg
, mask
);
6731 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6732 register_name
= "EntryLo1";
6735 goto cp0_unimplemented
;
6738 case CP0_REGISTER_09
:
6741 CP0_CHECK(ctx
->saar
);
6742 gen_helper_mthc0_saar(cpu_env
, arg
);
6743 register_name
= "SAAR";
6746 goto cp0_unimplemented
;
6748 case CP0_REGISTER_17
:
6752 * LLAddr is read-only (the only exception is bit 0 if LLB is
6753 * supported); the CP0_LLAddr_rw_bitmask does not seem to be
6754 * relevant for modern MIPS cores supporting MTHC0, therefore
6755 * treating MTHC0 to LLAddr as NOP.
6757 register_name
= "LLAddr";
6760 CP0_CHECK(ctx
->mrp
);
6761 gen_helper_mthc0_maar(cpu_env
, arg
);
6762 register_name
= "MAAR";
6765 goto cp0_unimplemented
;
6768 case CP0_REGISTER_28
:
6774 tcg_gen_andi_tl(arg
, arg
, mask
);
6775 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6776 register_name
= "TagLo";
6779 goto cp0_unimplemented
;
6783 goto cp0_unimplemented
;
6785 trace_mips_translate_c0("mthc0", register_name
, reg
, sel
);
6788 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n",
6789 register_name
, reg
, sel
);
/*
 * Result of reading an unimplemented CP0 register: zero when the
 * Release 6 ISA flag is set, all-ones otherwise.
 */
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
{
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
    } else {
        tcg_gen_movi_tl(arg, ~0);
    }
}
6801 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6803 const char *register_name
= "invalid";
6806 check_insn(ctx
, ISA_MIPS32
);
6810 case CP0_REGISTER_00
:
6813 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6814 register_name
= "Index";
6817 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6818 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6819 register_name
= "MVPControl";
6822 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6823 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6824 register_name
= "MVPConf0";
6827 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6828 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6829 register_name
= "MVPConf1";
6833 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6834 register_name
= "VPControl";
6837 goto cp0_unimplemented
;
6840 case CP0_REGISTER_01
:
6843 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6844 gen_helper_mfc0_random(arg
, cpu_env
);
6845 register_name
= "Random";
6848 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6849 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6850 register_name
= "VPEControl";
6853 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6854 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6855 register_name
= "VPEConf0";
6858 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6859 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6860 register_name
= "VPEConf1";
6863 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6864 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6865 register_name
= "YQMask";
6868 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6869 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6870 register_name
= "VPESchedule";
6873 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6874 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6875 register_name
= "VPEScheFBack";
6878 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6879 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6880 register_name
= "VPEOpt";
6883 goto cp0_unimplemented
;
6886 case CP0_REGISTER_02
:
6890 TCGv_i64 tmp
= tcg_temp_new_i64();
6891 tcg_gen_ld_i64(tmp
, cpu_env
,
6892 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6893 #if defined(TARGET_MIPS64)
6895 /* Move RI/XI fields to bits 31:30 */
6896 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6897 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6900 gen_move_low32(arg
, tmp
);
6901 tcg_temp_free_i64(tmp
);
6903 register_name
= "EntryLo0";
6906 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6907 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6908 register_name
= "TCStatus";
6911 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6912 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6913 register_name
= "TCBind";
6916 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6917 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6918 register_name
= "TCRestart";
6921 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6922 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6923 register_name
= "TCHalt";
6926 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6927 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6928 register_name
= "TCContext";
6931 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6932 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6933 register_name
= "TCSchedule";
6936 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6937 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6938 register_name
= "TCScheFBack";
6941 goto cp0_unimplemented
;
6944 case CP0_REGISTER_03
:
6948 TCGv_i64 tmp
= tcg_temp_new_i64();
6949 tcg_gen_ld_i64(tmp
, cpu_env
,
6950 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6951 #if defined(TARGET_MIPS64)
6953 /* Move RI/XI fields to bits 31:30 */
6954 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6955 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6958 gen_move_low32(arg
, tmp
);
6959 tcg_temp_free_i64(tmp
);
6961 register_name
= "EntryLo1";
6965 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6966 register_name
= "GlobalNumber";
6969 goto cp0_unimplemented
;
6972 case CP0_REGISTER_04
:
6975 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6976 tcg_gen_ext32s_tl(arg
, arg
);
6977 register_name
= "Context";
6980 /* gen_helper_mfc0_contextconfig(arg); - SmartMIPS ASE */
6981 register_name
= "ContextConfig";
6982 goto cp0_unimplemented
;
6984 CP0_CHECK(ctx
->ulri
);
6985 tcg_gen_ld_tl(arg
, cpu_env
,
6986 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6987 tcg_gen_ext32s_tl(arg
, arg
);
6988 register_name
= "UserLocal";
6991 goto cp0_unimplemented
;
6994 case CP0_REGISTER_05
:
6997 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6998 register_name
= "PageMask";
7001 check_insn(ctx
, ISA_MIPS32R2
);
7002 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
7003 register_name
= "PageGrain";
7007 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
7008 tcg_gen_ext32s_tl(arg
, arg
);
7009 register_name
= "SegCtl0";
7013 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
7014 tcg_gen_ext32s_tl(arg
, arg
);
7015 register_name
= "SegCtl1";
7019 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
7020 tcg_gen_ext32s_tl(arg
, arg
);
7021 register_name
= "SegCtl2";
7025 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7026 register_name
= "PWBase";
7030 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
7031 register_name
= "PWField";
7035 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
7036 register_name
= "PWSize";
7039 goto cp0_unimplemented
;
7042 case CP0_REGISTER_06
:
7045 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
7046 register_name
= "Wired";
7049 check_insn(ctx
, ISA_MIPS32R2
);
7050 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7051 register_name
= "SRSConf0";
7054 check_insn(ctx
, ISA_MIPS32R2
);
7055 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7056 register_name
= "SRSConf1";
7059 check_insn(ctx
, ISA_MIPS32R2
);
7060 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7061 register_name
= "SRSConf2";
7064 check_insn(ctx
, ISA_MIPS32R2
);
7065 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7066 register_name
= "SRSConf3";
7069 check_insn(ctx
, ISA_MIPS32R2
);
7070 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7071 register_name
= "SRSConf4";
7075 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7076 register_name
= "PWCtl";
7079 goto cp0_unimplemented
;
7082 case CP0_REGISTER_07
:
7085 check_insn(ctx
, ISA_MIPS32R2
);
7086 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7087 register_name
= "HWREna";
7090 goto cp0_unimplemented
;
7093 case CP0_REGISTER_08
:
7096 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7097 tcg_gen_ext32s_tl(arg
, arg
);
7098 register_name
= "BadVAddr";
7102 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7103 register_name
= "BadInstr";
7107 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7108 register_name
= "BadInstrP";
7112 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7113 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7114 register_name
= "BadInstrX";
7117 goto cp0_unimplemented
;
7120 case CP0_REGISTER_09
:
7123 /* Mark as an IO operation because we read the time. */
7124 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7127 gen_helper_mfc0_count(arg
, cpu_env
);
7128 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7132 * Break the TB to be able to take timer interrupts immediately
7133 * after reading count. DISAS_STOP isn't sufficient, we need to
7134 * ensure we break completely out of translated code.
7136 gen_save_pc(ctx
->base
.pc_next
+ 4);
7137 ctx
->base
.is_jmp
= DISAS_EXIT
;
7138 register_name
= "Count";
7141 CP0_CHECK(ctx
->saar
);
7142 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7143 register_name
= "SAARI";
7146 CP0_CHECK(ctx
->saar
);
7147 gen_helper_mfc0_saar(arg
, cpu_env
);
7148 register_name
= "SAAR";
7151 goto cp0_unimplemented
;
7154 case CP0_REGISTER_10
:
7157 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7158 tcg_gen_ext32s_tl(arg
, arg
);
7159 register_name
= "EntryHi";
7162 goto cp0_unimplemented
;
7165 case CP0_REGISTER_11
:
7168 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7169 register_name
= "Compare";
7171 /* 6,7 are implementation dependent */
7173 goto cp0_unimplemented
;
7176 case CP0_REGISTER_12
:
7179 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7180 register_name
= "Status";
7183 check_insn(ctx
, ISA_MIPS32R2
);
7184 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7185 register_name
= "IntCtl";
7188 check_insn(ctx
, ISA_MIPS32R2
);
7189 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7190 register_name
= "SRSCtl";
7193 check_insn(ctx
, ISA_MIPS32R2
);
7194 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7195 register_name
= "SRSMap";
7198 goto cp0_unimplemented
;
7201 case CP0_REGISTER_13
:
7204 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7205 register_name
= "Cause";
7208 goto cp0_unimplemented
;
7211 case CP0_REGISTER_14
:
7214 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7215 tcg_gen_ext32s_tl(arg
, arg
);
7216 register_name
= "EPC";
7219 goto cp0_unimplemented
;
7222 case CP0_REGISTER_15
:
7225 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7226 register_name
= "PRid";
7229 check_insn(ctx
, ISA_MIPS32R2
);
7230 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7231 tcg_gen_ext32s_tl(arg
, arg
);
7232 register_name
= "EBase";
7235 check_insn(ctx
, ISA_MIPS32R2
);
7236 CP0_CHECK(ctx
->cmgcr
);
7237 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7238 tcg_gen_ext32s_tl(arg
, arg
);
7239 register_name
= "CMGCRBase";
7242 goto cp0_unimplemented
;
7245 case CP0_REGISTER_16
:
7248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7249 register_name
= "Config";
7252 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7253 register_name
= "Config1";
7256 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7257 register_name
= "Config2";
7260 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7261 register_name
= "Config3";
7264 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7265 register_name
= "Config4";
7268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7269 register_name
= "Config5";
7271 /* 6,7 are implementation dependent */
7273 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7274 register_name
= "Config6";
7277 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7278 register_name
= "Config7";
7281 goto cp0_unimplemented
;
7284 case CP0_REGISTER_17
:
7287 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7288 register_name
= "LLAddr";
7291 CP0_CHECK(ctx
->mrp
);
7292 gen_helper_mfc0_maar(arg
, cpu_env
);
7293 register_name
= "MAAR";
7296 CP0_CHECK(ctx
->mrp
);
7297 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7298 register_name
= "MAARI";
7301 goto cp0_unimplemented
;
7304 case CP0_REGISTER_18
:
7314 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7315 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7316 register_name
= "WatchLo";
7319 goto cp0_unimplemented
;
7322 case CP0_REGISTER_19
:
7332 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7333 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7334 register_name
= "WatchHi";
7337 goto cp0_unimplemented
;
7340 case CP0_REGISTER_20
:
7343 #if defined(TARGET_MIPS64)
7344 check_insn(ctx
, ISA_MIPS3
);
7345 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7346 tcg_gen_ext32s_tl(arg
, arg
);
7347 register_name
= "XContext";
7351 goto cp0_unimplemented
;
7354 case CP0_REGISTER_21
:
7355 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7356 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7360 register_name
= "Framemask";
7363 goto cp0_unimplemented
;
7366 case CP0_REGISTER_22
:
7367 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7368 register_name
= "'Diagnostic"; /* implementation dependent */
7370 case CP0_REGISTER_23
:
7373 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7374 register_name
= "Debug";
7377 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
7378 register_name
= "TraceControl";
7379 goto cp0_unimplemented
;
7381 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
7382 register_name
= "TraceControl2";
7383 goto cp0_unimplemented
;
7385 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
7386 register_name
= "UserTraceData";
7387 goto cp0_unimplemented
;
7389 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
7390 register_name
= "TraceBPC";
7391 goto cp0_unimplemented
;
7393 goto cp0_unimplemented
;
7396 case CP0_REGISTER_24
:
7400 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7401 tcg_gen_ext32s_tl(arg
, arg
);
7402 register_name
= "DEPC";
7405 goto cp0_unimplemented
;
7408 case CP0_REGISTER_25
:
7411 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7412 register_name
= "Performance0";
7415 /* gen_helper_mfc0_performance1(arg); */
7416 register_name
= "Performance1";
7417 goto cp0_unimplemented
;
7419 /* gen_helper_mfc0_performance2(arg); */
7420 register_name
= "Performance2";
7421 goto cp0_unimplemented
;
7423 /* gen_helper_mfc0_performance3(arg); */
7424 register_name
= "Performance3";
7425 goto cp0_unimplemented
;
7427 /* gen_helper_mfc0_performance4(arg); */
7428 register_name
= "Performance4";
7429 goto cp0_unimplemented
;
7431 /* gen_helper_mfc0_performance5(arg); */
7432 register_name
= "Performance5";
7433 goto cp0_unimplemented
;
7435 /* gen_helper_mfc0_performance6(arg); */
7436 register_name
= "Performance6";
7437 goto cp0_unimplemented
;
7439 /* gen_helper_mfc0_performance7(arg); */
7440 register_name
= "Performance7";
7441 goto cp0_unimplemented
;
7443 goto cp0_unimplemented
;
7446 case CP0_REGISTER_26
:
7449 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7450 register_name
= "ErrCtl";
7453 goto cp0_unimplemented
;
7456 case CP0_REGISTER_27
:
7462 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7463 register_name
= "CacheErr";
7466 goto cp0_unimplemented
;
7469 case CP0_REGISTER_28
:
7476 TCGv_i64 tmp
= tcg_temp_new_i64();
7477 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7478 gen_move_low32(arg
, tmp
);
7479 tcg_temp_free_i64(tmp
);
7481 register_name
= "TagLo";
7487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7488 register_name
= "DataLo";
7491 goto cp0_unimplemented
;
7494 case CP0_REGISTER_29
:
7500 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7501 register_name
= "TagHi";
7507 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7508 register_name
= "DataHi";
7511 goto cp0_unimplemented
;
7514 case CP0_REGISTER_30
:
7517 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7518 tcg_gen_ext32s_tl(arg
, arg
);
7519 register_name
= "ErrorEPC";
7522 goto cp0_unimplemented
;
7525 case CP0_REGISTER_31
:
7529 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7530 register_name
= "DESAVE";
7538 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7539 tcg_gen_ld_tl(arg
, cpu_env
,
7540 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7541 tcg_gen_ext32s_tl(arg
, arg
);
7542 register_name
= "KScratch";
7545 goto cp0_unimplemented
;
7549 goto cp0_unimplemented
;
7551 trace_mips_translate_c0("mfc0", register_name
, reg
, sel
);
7555 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n",
7556 register_name
, reg
, sel
);
7557 gen_mfc0_unimplemented(ctx
, arg
);
7560 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7562 const char *register_name
= "invalid";
7565 check_insn(ctx
, ISA_MIPS32
);
7568 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7573 case CP0_REGISTER_00
:
7576 gen_helper_mtc0_index(cpu_env
, arg
);
7577 register_name
= "Index";
7580 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7581 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7582 register_name
= "MVPControl";
7585 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7587 register_name
= "MVPConf0";
7590 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7592 register_name
= "MVPConf1";
7597 register_name
= "VPControl";
7600 goto cp0_unimplemented
;
7603 case CP0_REGISTER_01
:
7607 register_name
= "Random";
7610 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7611 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7612 register_name
= "VPEControl";
7615 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7616 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7617 register_name
= "VPEConf0";
7620 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7621 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7622 register_name
= "VPEConf1";
7625 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7626 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7627 register_name
= "YQMask";
7630 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7631 tcg_gen_st_tl(arg
, cpu_env
,
7632 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7633 register_name
= "VPESchedule";
7636 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7637 tcg_gen_st_tl(arg
, cpu_env
,
7638 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7639 register_name
= "VPEScheFBack";
7642 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7643 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7644 register_name
= "VPEOpt";
7647 goto cp0_unimplemented
;
7650 case CP0_REGISTER_02
:
7653 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7654 register_name
= "EntryLo0";
7657 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7658 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7659 register_name
= "TCStatus";
7662 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7663 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7664 register_name
= "TCBind";
7667 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7668 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7669 register_name
= "TCRestart";
7672 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7673 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7674 register_name
= "TCHalt";
7677 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7678 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7679 register_name
= "TCContext";
7682 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7683 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7684 register_name
= "TCSchedule";
7687 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7688 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7689 register_name
= "TCScheFBack";
7692 goto cp0_unimplemented
;
7695 case CP0_REGISTER_03
:
7698 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7699 register_name
= "EntryLo1";
7704 register_name
= "GlobalNumber";
7707 goto cp0_unimplemented
;
7710 case CP0_REGISTER_04
:
7713 gen_helper_mtc0_context(cpu_env
, arg
);
7714 register_name
= "Context";
7717 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7718 register_name
= "ContextConfig";
7719 goto cp0_unimplemented
;
7721 CP0_CHECK(ctx
->ulri
);
7722 tcg_gen_st_tl(arg
, cpu_env
,
7723 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7724 register_name
= "UserLocal";
7727 goto cp0_unimplemented
;
7730 case CP0_REGISTER_05
:
7733 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7734 register_name
= "PageMask";
7737 check_insn(ctx
, ISA_MIPS32R2
);
7738 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7739 register_name
= "PageGrain";
7740 ctx
->base
.is_jmp
= DISAS_STOP
;
7744 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7745 register_name
= "SegCtl0";
7749 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7750 register_name
= "SegCtl1";
7754 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7755 register_name
= "SegCtl2";
7759 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7760 register_name
= "PWBase";
7764 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7765 register_name
= "PWField";
7769 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7770 register_name
= "PWSize";
7773 goto cp0_unimplemented
;
7776 case CP0_REGISTER_06
:
7779 gen_helper_mtc0_wired(cpu_env
, arg
);
7780 register_name
= "Wired";
7783 check_insn(ctx
, ISA_MIPS32R2
);
7784 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7785 register_name
= "SRSConf0";
7788 check_insn(ctx
, ISA_MIPS32R2
);
7789 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7790 register_name
= "SRSConf1";
7793 check_insn(ctx
, ISA_MIPS32R2
);
7794 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7795 register_name
= "SRSConf2";
7798 check_insn(ctx
, ISA_MIPS32R2
);
7799 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7800 register_name
= "SRSConf3";
7803 check_insn(ctx
, ISA_MIPS32R2
);
7804 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7805 register_name
= "SRSConf4";
7809 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7810 register_name
= "PWCtl";
7813 goto cp0_unimplemented
;
7816 case CP0_REGISTER_07
:
7819 check_insn(ctx
, ISA_MIPS32R2
);
7820 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7821 ctx
->base
.is_jmp
= DISAS_STOP
;
7822 register_name
= "HWREna";
7825 goto cp0_unimplemented
;
7828 case CP0_REGISTER_08
:
7832 register_name
= "BadVAddr";
7836 register_name
= "BadInstr";
7840 register_name
= "BadInstrP";
7844 register_name
= "BadInstrX";
7847 goto cp0_unimplemented
;
7850 case CP0_REGISTER_09
:
7853 gen_helper_mtc0_count(cpu_env
, arg
);
7854 register_name
= "Count";
7857 CP0_CHECK(ctx
->saar
);
7858 gen_helper_mtc0_saari(cpu_env
, arg
);
7859 register_name
= "SAARI";
7862 CP0_CHECK(ctx
->saar
);
7863 gen_helper_mtc0_saar(cpu_env
, arg
);
7864 register_name
= "SAAR";
7867 goto cp0_unimplemented
;
7870 case CP0_REGISTER_10
:
7873 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7874 register_name
= "EntryHi";
7877 goto cp0_unimplemented
;
7880 case CP0_REGISTER_11
:
7883 gen_helper_mtc0_compare(cpu_env
, arg
);
7884 register_name
= "Compare";
7886 /* 6,7 are implementation dependent */
7888 goto cp0_unimplemented
;
7891 case CP0_REGISTER_12
:
7894 save_cpu_state(ctx
, 1);
7895 gen_helper_mtc0_status(cpu_env
, arg
);
7896 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7897 gen_save_pc(ctx
->base
.pc_next
+ 4);
7898 ctx
->base
.is_jmp
= DISAS_EXIT
;
7899 register_name
= "Status";
7902 check_insn(ctx
, ISA_MIPS32R2
);
7903 gen_helper_mtc0_intctl(cpu_env
, arg
);
7904 /* Stop translation as we may have switched the execution mode */
7905 ctx
->base
.is_jmp
= DISAS_STOP
;
7906 register_name
= "IntCtl";
7909 check_insn(ctx
, ISA_MIPS32R2
);
7910 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7911 /* Stop translation as we may have switched the execution mode */
7912 ctx
->base
.is_jmp
= DISAS_STOP
;
7913 register_name
= "SRSCtl";
7916 check_insn(ctx
, ISA_MIPS32R2
);
7917 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7918 /* Stop translation as we may have switched the execution mode */
7919 ctx
->base
.is_jmp
= DISAS_STOP
;
7920 register_name
= "SRSMap";
7923 goto cp0_unimplemented
;
7926 case CP0_REGISTER_13
:
7929 save_cpu_state(ctx
, 1);
7930 gen_helper_mtc0_cause(cpu_env
, arg
);
7932 * Stop translation as we may have triggered an interrupt.
7933 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7934 * translated code to check for pending interrupts.
7936 gen_save_pc(ctx
->base
.pc_next
+ 4);
7937 ctx
->base
.is_jmp
= DISAS_EXIT
;
7938 register_name
= "Cause";
7941 goto cp0_unimplemented
;
7944 case CP0_REGISTER_14
:
7947 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7948 register_name
= "EPC";
7951 goto cp0_unimplemented
;
7954 case CP0_REGISTER_15
:
7958 register_name
= "PRid";
7961 check_insn(ctx
, ISA_MIPS32R2
);
7962 gen_helper_mtc0_ebase(cpu_env
, arg
);
7963 register_name
= "EBase";
7966 goto cp0_unimplemented
;
7969 case CP0_REGISTER_16
:
7972 gen_helper_mtc0_config0(cpu_env
, arg
);
7973 register_name
= "Config";
7974 /* Stop translation as we may have switched the execution mode */
7975 ctx
->base
.is_jmp
= DISAS_STOP
;
7978 /* ignored, read only */
7979 register_name
= "Config1";
7982 gen_helper_mtc0_config2(cpu_env
, arg
);
7983 register_name
= "Config2";
7984 /* Stop translation as we may have switched the execution mode */
7985 ctx
->base
.is_jmp
= DISAS_STOP
;
7988 gen_helper_mtc0_config3(cpu_env
, arg
);
7989 register_name
= "Config3";
7990 /* Stop translation as we may have switched the execution mode */
7991 ctx
->base
.is_jmp
= DISAS_STOP
;
7994 gen_helper_mtc0_config4(cpu_env
, arg
);
7995 register_name
= "Config4";
7996 ctx
->base
.is_jmp
= DISAS_STOP
;
7999 gen_helper_mtc0_config5(cpu_env
, arg
);
8000 register_name
= "Config5";
8001 /* Stop translation as we may have switched the execution mode */
8002 ctx
->base
.is_jmp
= DISAS_STOP
;
8004 /* 6,7 are implementation dependent */
8007 register_name
= "Config6";
8011 register_name
= "Config7";
8014 register_name
= "Invalid config selector";
8015 goto cp0_unimplemented
;
8018 case CP0_REGISTER_17
:
8021 gen_helper_mtc0_lladdr(cpu_env
, arg
);
8022 register_name
= "LLAddr";
8025 CP0_CHECK(ctx
->mrp
);
8026 gen_helper_mtc0_maar(cpu_env
, arg
);
8027 register_name
= "MAAR";
8030 CP0_CHECK(ctx
->mrp
);
8031 gen_helper_mtc0_maari(cpu_env
, arg
);
8032 register_name
= "MAARI";
8035 goto cp0_unimplemented
;
8038 case CP0_REGISTER_18
:
8048 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8049 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
8050 register_name
= "WatchLo";
8053 goto cp0_unimplemented
;
8056 case CP0_REGISTER_19
:
8066 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8067 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8068 register_name
= "WatchHi";
8071 goto cp0_unimplemented
;
8074 case CP0_REGISTER_20
:
8077 #if defined(TARGET_MIPS64)
8078 check_insn(ctx
, ISA_MIPS3
);
8079 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8080 register_name
= "XContext";
8084 goto cp0_unimplemented
;
8087 case CP0_REGISTER_21
:
8088 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8089 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8092 gen_helper_mtc0_framemask(cpu_env
, arg
);
8093 register_name
= "Framemask";
8096 goto cp0_unimplemented
;
8099 case CP0_REGISTER_22
:
8101 register_name
= "Diagnostic"; /* implementation dependent */
8103 case CP0_REGISTER_23
:
8106 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8107 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8108 gen_save_pc(ctx
->base
.pc_next
+ 4);
8109 ctx
->base
.is_jmp
= DISAS_EXIT
;
8110 register_name
= "Debug";
8113 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
8114 register_name
= "TraceControl";
8115 /* Stop translation as we may have switched the execution mode */
8116 ctx
->base
.is_jmp
= DISAS_STOP
;
8117 goto cp0_unimplemented
;
8119 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
8120 register_name
= "TraceControl2";
8121 /* Stop translation as we may have switched the execution mode */
8122 ctx
->base
.is_jmp
= DISAS_STOP
;
8123 goto cp0_unimplemented
;
8125 /* Stop translation as we may have switched the execution mode */
8126 ctx
->base
.is_jmp
= DISAS_STOP
;
8127 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
8128 register_name
= "UserTraceData";
8129 /* Stop translation as we may have switched the execution mode */
8130 ctx
->base
.is_jmp
= DISAS_STOP
;
8131 goto cp0_unimplemented
;
8133 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
8134 /* Stop translation as we may have switched the execution mode */
8135 ctx
->base
.is_jmp
= DISAS_STOP
;
8136 register_name
= "TraceBPC";
8137 goto cp0_unimplemented
;
8139 goto cp0_unimplemented
;
8142 case CP0_REGISTER_24
:
8146 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8147 register_name
= "DEPC";
8150 goto cp0_unimplemented
;
8153 case CP0_REGISTER_25
:
8156 gen_helper_mtc0_performance0(cpu_env
, arg
);
8157 register_name
= "Performance0";
8160 /* gen_helper_mtc0_performance1(arg); */
8161 register_name
= "Performance1";
8162 goto cp0_unimplemented
;
8164 /* gen_helper_mtc0_performance2(arg); */
8165 register_name
= "Performance2";
8166 goto cp0_unimplemented
;
8168 /* gen_helper_mtc0_performance3(arg); */
8169 register_name
= "Performance3";
8170 goto cp0_unimplemented
;
8172 /* gen_helper_mtc0_performance4(arg); */
8173 register_name
= "Performance4";
8174 goto cp0_unimplemented
;
8176 /* gen_helper_mtc0_performance5(arg); */
8177 register_name
= "Performance5";
8178 goto cp0_unimplemented
;
8180 /* gen_helper_mtc0_performance6(arg); */
8181 register_name
= "Performance6";
8182 goto cp0_unimplemented
;
8184 /* gen_helper_mtc0_performance7(arg); */
8185 register_name
= "Performance7";
8186 goto cp0_unimplemented
;
8188 goto cp0_unimplemented
;
8191 case CP0_REGISTER_26
:
8194 gen_helper_mtc0_errctl(cpu_env
, arg
);
8195 ctx
->base
.is_jmp
= DISAS_STOP
;
8196 register_name
= "ErrCtl";
8199 goto cp0_unimplemented
;
8202 case CP0_REGISTER_27
:
8209 register_name
= "CacheErr";
8212 goto cp0_unimplemented
;
8215 case CP0_REGISTER_28
:
8221 gen_helper_mtc0_taglo(cpu_env
, arg
);
8222 register_name
= "TagLo";
8228 gen_helper_mtc0_datalo(cpu_env
, arg
);
8229 register_name
= "DataLo";
8232 goto cp0_unimplemented
;
8235 case CP0_REGISTER_29
:
8241 gen_helper_mtc0_taghi(cpu_env
, arg
);
8242 register_name
= "TagHi";
8248 gen_helper_mtc0_datahi(cpu_env
, arg
);
8249 register_name
= "DataHi";
8252 register_name
= "invalid sel";
8253 goto cp0_unimplemented
;
8256 case CP0_REGISTER_30
:
8259 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8260 register_name
= "ErrorEPC";
8263 goto cp0_unimplemented
;
8266 case CP0_REGISTER_31
:
8270 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8271 register_name
= "DESAVE";
8279 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8280 tcg_gen_st_tl(arg
, cpu_env
,
8281 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
8282 register_name
= "KScratch";
8285 goto cp0_unimplemented
;
8289 goto cp0_unimplemented
;
8291 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
8293 /* For simplicity assume that all writes can cause interrupts. */
8294 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8297 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8298 * translated code to check for pending interrupts.
8300 gen_save_pc(ctx
->base
.pc_next
+ 4);
8301 ctx
->base
.is_jmp
= DISAS_EXIT
;
8306 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
8307 register_name
, reg
, sel
);
8310 #if defined(TARGET_MIPS64)
8311 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8313 const char *register_name
= "invalid";
8316 check_insn(ctx
, ISA_MIPS64
);
8320 case CP0_REGISTER_00
:
8323 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8324 register_name
= "Index";
8327 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8328 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8329 register_name
= "MVPControl";
8332 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8333 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8334 register_name
= "MVPConf0";
8337 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8338 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8339 register_name
= "MVPConf1";
8343 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8344 register_name
= "VPControl";
8347 goto cp0_unimplemented
;
8350 case CP0_REGISTER_01
:
8353 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8354 gen_helper_mfc0_random(arg
, cpu_env
);
8355 register_name
= "Random";
8358 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8360 register_name
= "VPEControl";
8363 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8364 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8365 register_name
= "VPEConf0";
8368 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8370 register_name
= "VPEConf1";
8373 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8374 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
8375 register_name
= "YQMask";
8378 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8379 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
8380 register_name
= "VPESchedule";
8383 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8384 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8385 register_name
= "VPEScheFBack";
8388 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8390 register_name
= "VPEOpt";
8393 goto cp0_unimplemented
;
8396 case CP0_REGISTER_02
:
8399 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
8400 register_name
= "EntryLo0";
8403 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8404 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8405 register_name
= "TCStatus";
8408 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8409 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8410 register_name
= "TCBind";
8413 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8414 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8415 register_name
= "TCRestart";
8418 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8419 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8420 register_name
= "TCHalt";
8423 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8424 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8425 register_name
= "TCContext";
8428 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8429 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8430 register_name
= "TCSchedule";
8433 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8434 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8435 register_name
= "TCScheFBack";
8438 goto cp0_unimplemented
;
8441 case CP0_REGISTER_03
:
8444 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8445 register_name
= "EntryLo1";
8449 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8450 register_name
= "GlobalNumber";
8453 goto cp0_unimplemented
;
8456 case CP0_REGISTER_04
:
8459 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8460 register_name
= "Context";
8463 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
8464 register_name
= "ContextConfig";
8465 goto cp0_unimplemented
;
8467 CP0_CHECK(ctx
->ulri
);
8468 tcg_gen_ld_tl(arg
, cpu_env
,
8469 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8470 register_name
= "UserLocal";
8473 goto cp0_unimplemented
;
8476 case CP0_REGISTER_05
:
8479 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8480 register_name
= "PageMask";
8483 check_insn(ctx
, ISA_MIPS32R2
);
8484 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8485 register_name
= "PageGrain";
8489 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8490 register_name
= "SegCtl0";
8494 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8495 register_name
= "SegCtl1";
8499 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8500 register_name
= "SegCtl2";
8504 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8505 register_name
= "PWBase";
8509 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8510 register_name
= "PWField";
8514 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8515 register_name
= "PWSize";
8518 goto cp0_unimplemented
;
8521 case CP0_REGISTER_06
:
8524 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8525 register_name
= "Wired";
8528 check_insn(ctx
, ISA_MIPS32R2
);
8529 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8530 register_name
= "SRSConf0";
8533 check_insn(ctx
, ISA_MIPS32R2
);
8534 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8535 register_name
= "SRSConf1";
8538 check_insn(ctx
, ISA_MIPS32R2
);
8539 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8540 register_name
= "SRSConf2";
8543 check_insn(ctx
, ISA_MIPS32R2
);
8544 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8545 register_name
= "SRSConf3";
8548 check_insn(ctx
, ISA_MIPS32R2
);
8549 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8550 register_name
= "SRSConf4";
8554 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8555 register_name
= "PWCtl";
8558 goto cp0_unimplemented
;
8561 case CP0_REGISTER_07
:
8564 check_insn(ctx
, ISA_MIPS32R2
);
8565 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8566 register_name
= "HWREna";
8569 goto cp0_unimplemented
;
8572 case CP0_REGISTER_08
:
8575 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8576 register_name
= "BadVAddr";
8580 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8581 register_name
= "BadInstr";
8585 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8586 register_name
= "BadInstrP";
8590 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8591 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8592 register_name
= "BadInstrX";
8595 goto cp0_unimplemented
;
8598 case CP0_REGISTER_09
:
8601 /* Mark as an IO operation because we read the time. */
8602 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8605 gen_helper_mfc0_count(arg
, cpu_env
);
8606 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8610 * Break the TB to be able to take timer interrupts immediately
8611 * after reading count. DISAS_STOP isn't sufficient, we need to
8612 * ensure we break completely out of translated code.
8614 gen_save_pc(ctx
->base
.pc_next
+ 4);
8615 ctx
->base
.is_jmp
= DISAS_EXIT
;
8616 register_name
= "Count";
8619 CP0_CHECK(ctx
->saar
);
8620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
8621 register_name
= "SAARI";
8624 CP0_CHECK(ctx
->saar
);
8625 gen_helper_dmfc0_saar(arg
, cpu_env
);
8626 register_name
= "SAAR";
8629 goto cp0_unimplemented
;
8632 case CP0_REGISTER_10
:
8635 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8636 register_name
= "EntryHi";
8639 goto cp0_unimplemented
;
8642 case CP0_REGISTER_11
:
8645 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8646 register_name
= "Compare";
8648 /* 6,7 are implementation dependent */
8650 goto cp0_unimplemented
;
8653 case CP0_REGISTER_12
:
8656 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8657 register_name
= "Status";
8660 check_insn(ctx
, ISA_MIPS32R2
);
8661 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8662 register_name
= "IntCtl";
8665 check_insn(ctx
, ISA_MIPS32R2
);
8666 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8667 register_name
= "SRSCtl";
8670 check_insn(ctx
, ISA_MIPS32R2
);
8671 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8672 register_name
= "SRSMap";
8675 goto cp0_unimplemented
;
8678 case CP0_REGISTER_13
:
8681 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8682 register_name
= "Cause";
8685 goto cp0_unimplemented
;
8688 case CP0_REGISTER_14
:
8691 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8692 register_name
= "EPC";
8695 goto cp0_unimplemented
;
8698 case CP0_REGISTER_15
:
8701 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8702 register_name
= "PRid";
8705 check_insn(ctx
, ISA_MIPS32R2
);
8706 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8707 register_name
= "EBase";
8710 check_insn(ctx
, ISA_MIPS32R2
);
8711 CP0_CHECK(ctx
->cmgcr
);
8712 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8713 register_name
= "CMGCRBase";
8716 goto cp0_unimplemented
;
8719 case CP0_REGISTER_16
:
8722 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8723 register_name
= "Config";
8726 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8727 register_name
= "Config1";
8730 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8731 register_name
= "Config2";
8734 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8735 register_name
= "Config3";
8738 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8739 register_name
= "Config4";
8742 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8743 register_name
= "Config5";
8745 /* 6,7 are implementation dependent */
8747 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8748 register_name
= "Config6";
8751 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8752 register_name
= "Config7";
8755 goto cp0_unimplemented
;
8758 case CP0_REGISTER_17
:
8761 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8762 register_name
= "LLAddr";
8765 CP0_CHECK(ctx
->mrp
);
8766 gen_helper_dmfc0_maar(arg
, cpu_env
);
8767 register_name
= "MAAR";
8770 CP0_CHECK(ctx
->mrp
);
8771 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8772 register_name
= "MAARI";
8775 goto cp0_unimplemented
;
8778 case CP0_REGISTER_18
:
8788 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8789 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8790 register_name
= "WatchLo";
8793 goto cp0_unimplemented
;
8796 case CP0_REGISTER_19
:
8806 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8807 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
8808 register_name
= "WatchHi";
8811 goto cp0_unimplemented
;
8814 case CP0_REGISTER_20
:
8817 check_insn(ctx
, ISA_MIPS3
);
8818 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8819 register_name
= "XContext";
8822 goto cp0_unimplemented
;
8825 case CP0_REGISTER_21
:
8826 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8827 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
8830 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8831 register_name
= "Framemask";
8834 goto cp0_unimplemented
;
8837 case CP0_REGISTER_22
:
8838 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8839 register_name
= "'Diagnostic"; /* implementation dependent */
8841 case CP0_REGISTER_23
:
8844 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8845 register_name
= "Debug";
8848 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
8849 register_name
= "TraceControl";
8850 goto cp0_unimplemented
;
8852 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
8853 register_name
= "TraceControl2";
8854 goto cp0_unimplemented
;
8856 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
8857 register_name
= "UserTraceData";
8858 goto cp0_unimplemented
;
8860 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
8861 register_name
= "TraceBPC";
8862 goto cp0_unimplemented
;
8864 goto cp0_unimplemented
;
8867 case CP0_REGISTER_24
:
8871 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8872 register_name
= "DEPC";
8875 goto cp0_unimplemented
;
8878 case CP0_REGISTER_25
:
8881 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8882 register_name
= "Performance0";
8885 /* gen_helper_dmfc0_performance1(arg); */
8886 register_name
= "Performance1";
8887 goto cp0_unimplemented
;
8889 /* gen_helper_dmfc0_performance2(arg); */
8890 register_name
= "Performance2";
8891 goto cp0_unimplemented
;
8893 /* gen_helper_dmfc0_performance3(arg); */
8894 register_name
= "Performance3";
8895 goto cp0_unimplemented
;
8897 /* gen_helper_dmfc0_performance4(arg); */
8898 register_name
= "Performance4";
8899 goto cp0_unimplemented
;
8901 /* gen_helper_dmfc0_performance5(arg); */
8902 register_name
= "Performance5";
8903 goto cp0_unimplemented
;
8905 /* gen_helper_dmfc0_performance6(arg); */
8906 register_name
= "Performance6";
8907 goto cp0_unimplemented
;
8909 /* gen_helper_dmfc0_performance7(arg); */
8910 register_name
= "Performance7";
8911 goto cp0_unimplemented
;
8913 goto cp0_unimplemented
;
8916 case CP0_REGISTER_26
:
8919 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8920 register_name
= "ErrCtl";
8923 goto cp0_unimplemented
;
8926 case CP0_REGISTER_27
:
8933 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8934 register_name
= "CacheErr";
8937 goto cp0_unimplemented
;
8940 case CP0_REGISTER_28
:
8946 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8947 register_name
= "TagLo";
8953 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8954 register_name
= "DataLo";
8957 goto cp0_unimplemented
;
8960 case CP0_REGISTER_29
:
8966 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8967 register_name
= "TagHi";
8973 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8974 register_name
= "DataHi";
8977 goto cp0_unimplemented
;
8980 case CP0_REGISTER_30
:
8983 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8984 register_name
= "ErrorEPC";
8987 goto cp0_unimplemented
;
8990 case CP0_REGISTER_31
:
8994 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8995 register_name
= "DESAVE";
9003 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9004 tcg_gen_ld_tl(arg
, cpu_env
,
9005 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9006 register_name
= "KScratch";
9009 goto cp0_unimplemented
;
9013 goto cp0_unimplemented
;
9015 trace_mips_translate_c0("dmfc0", register_name
, reg
, sel
);
9019 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n",
9020 register_name
, reg
, sel
);
9021 gen_mfc0_unimplemented(ctx
, arg
);
9024 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
9026 const char *register_name
= "invalid";
9029 check_insn(ctx
, ISA_MIPS64
);
9032 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9037 case CP0_REGISTER_00
:
9040 gen_helper_mtc0_index(cpu_env
, arg
);
9041 register_name
= "Index";
9044 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9045 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
9046 register_name
= "MVPControl";
9049 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9051 register_name
= "MVPConf0";
9054 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9056 register_name
= "MVPConf1";
9061 register_name
= "VPControl";
9064 goto cp0_unimplemented
;
9067 case CP0_REGISTER_01
:
9071 register_name
= "Random";
9074 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9075 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
9076 register_name
= "VPEControl";
9079 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9080 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
9081 register_name
= "VPEConf0";
9084 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9085 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
9086 register_name
= "VPEConf1";
9089 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9090 gen_helper_mtc0_yqmask(cpu_env
, arg
);
9091 register_name
= "YQMask";
9094 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9095 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
9096 register_name
= "VPESchedule";
9099 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9100 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
9101 register_name
= "VPEScheFBack";
9104 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9105 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
9106 register_name
= "VPEOpt";
9109 goto cp0_unimplemented
;
9112 case CP0_REGISTER_02
:
9115 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
9116 register_name
= "EntryLo0";
9119 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9120 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
9121 register_name
= "TCStatus";
9124 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9125 gen_helper_mtc0_tcbind(cpu_env
, arg
);
9126 register_name
= "TCBind";
9129 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9130 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
9131 register_name
= "TCRestart";
9134 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9135 gen_helper_mtc0_tchalt(cpu_env
, arg
);
9136 register_name
= "TCHalt";
9139 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9140 gen_helper_mtc0_tccontext(cpu_env
, arg
);
9141 register_name
= "TCContext";
9144 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9145 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
9146 register_name
= "TCSchedule";
9149 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9150 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
9151 register_name
= "TCScheFBack";
9154 goto cp0_unimplemented
;
9157 case CP0_REGISTER_03
:
9160 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
9161 register_name
= "EntryLo1";
9166 register_name
= "GlobalNumber";
9169 goto cp0_unimplemented
;
9172 case CP0_REGISTER_04
:
9175 gen_helper_mtc0_context(cpu_env
, arg
);
9176 register_name
= "Context";
9179 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
9180 register_name
= "ContextConfig";
9181 goto cp0_unimplemented
;
9183 CP0_CHECK(ctx
->ulri
);
9184 tcg_gen_st_tl(arg
, cpu_env
,
9185 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9186 register_name
= "UserLocal";
9189 goto cp0_unimplemented
;
9192 case CP0_REGISTER_05
:
9195 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9196 register_name
= "PageMask";
9199 check_insn(ctx
, ISA_MIPS32R2
);
9200 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9201 register_name
= "PageGrain";
9205 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9206 register_name
= "SegCtl0";
9210 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9211 register_name
= "SegCtl1";
9215 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9216 register_name
= "SegCtl2";
9220 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9221 register_name
= "PWBase";
9225 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9226 register_name
= "PWField";
9230 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9231 register_name
= "PWSize";
9234 goto cp0_unimplemented
;
9237 case CP0_REGISTER_06
:
9240 gen_helper_mtc0_wired(cpu_env
, arg
);
9241 register_name
= "Wired";
9244 check_insn(ctx
, ISA_MIPS32R2
);
9245 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9246 register_name
= "SRSConf0";
9249 check_insn(ctx
, ISA_MIPS32R2
);
9250 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9251 register_name
= "SRSConf1";
9254 check_insn(ctx
, ISA_MIPS32R2
);
9255 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9256 register_name
= "SRSConf2";
9259 check_insn(ctx
, ISA_MIPS32R2
);
9260 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9261 register_name
= "SRSConf3";
9264 check_insn(ctx
, ISA_MIPS32R2
);
9265 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9266 register_name
= "SRSConf4";
9270 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9271 register_name
= "PWCtl";
9274 goto cp0_unimplemented
;
9277 case CP0_REGISTER_07
:
9280 check_insn(ctx
, ISA_MIPS32R2
);
9281 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9282 ctx
->base
.is_jmp
= DISAS_STOP
;
9283 register_name
= "HWREna";
9286 goto cp0_unimplemented
;
9289 case CP0_REGISTER_08
:
9293 register_name
= "BadVAddr";
9297 register_name
= "BadInstr";
9301 register_name
= "BadInstrP";
9305 register_name
= "BadInstrX";
9308 goto cp0_unimplemented
;
9311 case CP0_REGISTER_09
:
9314 gen_helper_mtc0_count(cpu_env
, arg
);
9315 register_name
= "Count";
9318 CP0_CHECK(ctx
->saar
);
9319 gen_helper_mtc0_saari(cpu_env
, arg
);
9320 register_name
= "SAARI";
9323 CP0_CHECK(ctx
->saar
);
9324 gen_helper_mtc0_saar(cpu_env
, arg
);
9325 register_name
= "SAAR";
9328 goto cp0_unimplemented
;
9330 /* Stop translation as we may have switched the execution mode */
9331 ctx
->base
.is_jmp
= DISAS_STOP
;
9333 case CP0_REGISTER_10
:
9336 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9337 register_name
= "EntryHi";
9340 goto cp0_unimplemented
;
9343 case CP0_REGISTER_11
:
9346 gen_helper_mtc0_compare(cpu_env
, arg
);
9347 register_name
= "Compare";
9349 /* 6,7 are implementation dependent */
9351 goto cp0_unimplemented
;
9353 /* Stop translation as we may have switched the execution mode */
9354 ctx
->base
.is_jmp
= DISAS_STOP
;
9356 case CP0_REGISTER_12
:
9359 save_cpu_state(ctx
, 1);
9360 gen_helper_mtc0_status(cpu_env
, arg
);
9361 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9362 gen_save_pc(ctx
->base
.pc_next
+ 4);
9363 ctx
->base
.is_jmp
= DISAS_EXIT
;
9364 register_name
= "Status";
9367 check_insn(ctx
, ISA_MIPS32R2
);
9368 gen_helper_mtc0_intctl(cpu_env
, arg
);
9369 /* Stop translation as we may have switched the execution mode */
9370 ctx
->base
.is_jmp
= DISAS_STOP
;
9371 register_name
= "IntCtl";
9374 check_insn(ctx
, ISA_MIPS32R2
);
9375 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9376 /* Stop translation as we may have switched the execution mode */
9377 ctx
->base
.is_jmp
= DISAS_STOP
;
9378 register_name
= "SRSCtl";
9381 check_insn(ctx
, ISA_MIPS32R2
);
9382 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9383 /* Stop translation as we may have switched the execution mode */
9384 ctx
->base
.is_jmp
= DISAS_STOP
;
9385 register_name
= "SRSMap";
9388 goto cp0_unimplemented
;
9391 case CP0_REGISTER_13
:
9394 save_cpu_state(ctx
, 1);
9395 gen_helper_mtc0_cause(cpu_env
, arg
);
9397 * Stop translation as we may have triggered an interrupt.
9398 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9399 * translated code to check for pending interrupts.
9401 gen_save_pc(ctx
->base
.pc_next
+ 4);
9402 ctx
->base
.is_jmp
= DISAS_EXIT
;
9403 register_name
= "Cause";
9406 goto cp0_unimplemented
;
9409 case CP0_REGISTER_14
:
9412 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9413 register_name
= "EPC";
9416 goto cp0_unimplemented
;
9419 case CP0_REGISTER_15
:
9423 register_name
= "PRid";
9426 check_insn(ctx
, ISA_MIPS32R2
);
9427 gen_helper_mtc0_ebase(cpu_env
, arg
);
9428 register_name
= "EBase";
9431 goto cp0_unimplemented
;
9434 case CP0_REGISTER_16
:
9437 gen_helper_mtc0_config0(cpu_env
, arg
);
9438 register_name
= "Config";
9439 /* Stop translation as we may have switched the execution mode */
9440 ctx
->base
.is_jmp
= DISAS_STOP
;
9443 /* ignored, read only */
9444 register_name
= "Config1";
9447 gen_helper_mtc0_config2(cpu_env
, arg
);
9448 register_name
= "Config2";
9449 /* Stop translation as we may have switched the execution mode */
9450 ctx
->base
.is_jmp
= DISAS_STOP
;
9453 gen_helper_mtc0_config3(cpu_env
, arg
);
9454 register_name
= "Config3";
9455 /* Stop translation as we may have switched the execution mode */
9456 ctx
->base
.is_jmp
= DISAS_STOP
;
9459 /* currently ignored */
9460 register_name
= "Config4";
9463 gen_helper_mtc0_config5(cpu_env
, arg
);
9464 register_name
= "Config5";
9465 /* Stop translation as we may have switched the execution mode */
9466 ctx
->base
.is_jmp
= DISAS_STOP
;
9468 /* 6,7 are implementation dependent */
9470 register_name
= "Invalid config selector";
9471 goto cp0_unimplemented
;
9474 case CP0_REGISTER_17
:
9477 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9478 register_name
= "LLAddr";
9481 CP0_CHECK(ctx
->mrp
);
9482 gen_helper_mtc0_maar(cpu_env
, arg
);
9483 register_name
= "MAAR";
9486 CP0_CHECK(ctx
->mrp
);
9487 gen_helper_mtc0_maari(cpu_env
, arg
);
9488 register_name
= "MAARI";
9491 goto cp0_unimplemented
;
9494 case CP0_REGISTER_18
:
9504 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9505 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9506 register_name
= "WatchLo";
9509 goto cp0_unimplemented
;
9512 case CP0_REGISTER_19
:
9522 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9523 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9524 register_name
= "WatchHi";
9527 goto cp0_unimplemented
;
9530 case CP0_REGISTER_20
:
9533 check_insn(ctx
, ISA_MIPS3
);
9534 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9535 register_name
= "XContext";
9538 goto cp0_unimplemented
;
9541 case CP0_REGISTER_21
:
9542 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9543 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
9546 gen_helper_mtc0_framemask(cpu_env
, arg
);
9547 register_name
= "Framemask";
9550 goto cp0_unimplemented
;
9553 case CP0_REGISTER_22
:
9555 register_name
= "Diagnostic"; /* implementation dependent */
9557 case CP0_REGISTER_23
:
9560 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9561 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9562 gen_save_pc(ctx
->base
.pc_next
+ 4);
9563 ctx
->base
.is_jmp
= DISAS_EXIT
;
9564 register_name
= "Debug";
9567 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
9568 /* Stop translation as we may have switched the execution mode */
9569 ctx
->base
.is_jmp
= DISAS_STOP
;
9570 register_name
= "TraceControl";
9571 goto cp0_unimplemented
;
9573 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
9574 /* Stop translation as we may have switched the execution mode */
9575 ctx
->base
.is_jmp
= DISAS_STOP
;
9576 register_name
= "TraceControl2";
9577 goto cp0_unimplemented
;
9579 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
9580 /* Stop translation as we may have switched the execution mode */
9581 ctx
->base
.is_jmp
= DISAS_STOP
;
9582 register_name
= "UserTraceData";
9583 goto cp0_unimplemented
;
9585 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
9586 /* Stop translation as we may have switched the execution mode */
9587 ctx
->base
.is_jmp
= DISAS_STOP
;
9588 register_name
= "TraceBPC";
9589 goto cp0_unimplemented
;
9591 goto cp0_unimplemented
;
9594 case CP0_REGISTER_24
:
9598 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9599 register_name
= "DEPC";
9602 goto cp0_unimplemented
;
9605 case CP0_REGISTER_25
:
9608 gen_helper_mtc0_performance0(cpu_env
, arg
);
9609 register_name
= "Performance0";
9612 /* gen_helper_mtc0_performance1(cpu_env, arg); */
9613 register_name
= "Performance1";
9614 goto cp0_unimplemented
;
9616 /* gen_helper_mtc0_performance2(cpu_env, arg); */
9617 register_name
= "Performance2";
9618 goto cp0_unimplemented
;
9620 /* gen_helper_mtc0_performance3(cpu_env, arg); */
9621 register_name
= "Performance3";
9622 goto cp0_unimplemented
;
9624 /* gen_helper_mtc0_performance4(cpu_env, arg); */
9625 register_name
= "Performance4";
9626 goto cp0_unimplemented
;
9628 /* gen_helper_mtc0_performance5(cpu_env, arg); */
9629 register_name
= "Performance5";
9630 goto cp0_unimplemented
;
9632 /* gen_helper_mtc0_performance6(cpu_env, arg); */
9633 register_name
= "Performance6";
9634 goto cp0_unimplemented
;
9636 /* gen_helper_mtc0_performance7(cpu_env, arg); */
9637 register_name
= "Performance7";
9638 goto cp0_unimplemented
;
9640 goto cp0_unimplemented
;
9643 case CP0_REGISTER_26
:
9646 gen_helper_mtc0_errctl(cpu_env
, arg
);
9647 ctx
->base
.is_jmp
= DISAS_STOP
;
9648 register_name
= "ErrCtl";
9651 goto cp0_unimplemented
;
9654 case CP0_REGISTER_27
:
9661 register_name
= "CacheErr";
9664 goto cp0_unimplemented
;
9667 case CP0_REGISTER_28
:
9673 gen_helper_mtc0_taglo(cpu_env
, arg
);
9674 register_name
= "TagLo";
9680 gen_helper_mtc0_datalo(cpu_env
, arg
);
9681 register_name
= "DataLo";
9684 goto cp0_unimplemented
;
9687 case CP0_REGISTER_29
:
9693 gen_helper_mtc0_taghi(cpu_env
, arg
);
9694 register_name
= "TagHi";
9700 gen_helper_mtc0_datahi(cpu_env
, arg
);
9701 register_name
= "DataHi";
9704 register_name
= "invalid sel";
9705 goto cp0_unimplemented
;
9708 case CP0_REGISTER_30
:
9711 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9712 register_name
= "ErrorEPC";
9715 goto cp0_unimplemented
;
9718 case CP0_REGISTER_31
:
9722 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9723 register_name
= "DESAVE";
9731 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9732 tcg_gen_st_tl(arg
, cpu_env
,
9733 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
9734 register_name
= "KScratch";
9737 goto cp0_unimplemented
;
9741 goto cp0_unimplemented
;
9743 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
9745 /* For simplicity assume that all writes can cause interrupts. */
9746 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9749 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9750 * translated code to check for pending interrupts.
9752 gen_save_pc(ctx
->base
.pc_next
+ 4);
9753 ctx
->base
.is_jmp
= DISAS_EXIT
;
9758 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
9759 register_name
, reg
, sel
);
9761 #endif /* TARGET_MIPS64 */
9763 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9764 int u
, int sel
, int h
)
9766 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9767 TCGv t0
= tcg_temp_local_new();
9769 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9770 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9771 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
9772 tcg_gen_movi_tl(t0
, -1);
9773 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9774 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
9775 tcg_gen_movi_tl(t0
, -1);
9776 } else if (u
== 0) {
9781 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9784 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9794 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9797 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9800 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9803 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9806 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9809 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9812 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9815 gen_mfc0(ctx
, t0
, rt
, sel
);
9822 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9825 gen_mfc0(ctx
, t0
, rt
, sel
);
9831 gen_helper_mftc0_status(t0
, cpu_env
);
9834 gen_mfc0(ctx
, t0
, rt
, sel
);
9840 gen_helper_mftc0_cause(t0
, cpu_env
);
9850 gen_helper_mftc0_epc(t0
, cpu_env
);
9860 gen_helper_mftc0_ebase(t0
, cpu_env
);
9877 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9887 gen_helper_mftc0_debug(t0
, cpu_env
);
9890 gen_mfc0(ctx
, t0
, rt
, sel
);
9895 gen_mfc0(ctx
, t0
, rt
, sel
);
9897 } else switch (sel
) {
9898 /* GPR registers. */
9900 gen_helper_1e0i(mftgpr
, t0
, rt
);
9902 /* Auxiliary CPU registers */
9906 gen_helper_1e0i(mftlo
, t0
, 0);
9909 gen_helper_1e0i(mfthi
, t0
, 0);
9912 gen_helper_1e0i(mftacx
, t0
, 0);
9915 gen_helper_1e0i(mftlo
, t0
, 1);
9918 gen_helper_1e0i(mfthi
, t0
, 1);
9921 gen_helper_1e0i(mftacx
, t0
, 1);
9924 gen_helper_1e0i(mftlo
, t0
, 2);
9927 gen_helper_1e0i(mfthi
, t0
, 2);
9930 gen_helper_1e0i(mftacx
, t0
, 2);
9933 gen_helper_1e0i(mftlo
, t0
, 3);
9936 gen_helper_1e0i(mfthi
, t0
, 3);
9939 gen_helper_1e0i(mftacx
, t0
, 3);
9942 gen_helper_mftdsp(t0
, cpu_env
);
9948 /* Floating point (COP1). */
9950 /* XXX: For now we support only a single FPU context. */
9952 TCGv_i32 fp0
= tcg_temp_new_i32();
9954 gen_load_fpr32(ctx
, fp0
, rt
);
9955 tcg_gen_ext_i32_tl(t0
, fp0
);
9956 tcg_temp_free_i32(fp0
);
9958 TCGv_i32 fp0
= tcg_temp_new_i32();
9960 gen_load_fpr32h(ctx
, fp0
, rt
);
9961 tcg_gen_ext_i32_tl(t0
, fp0
);
9962 tcg_temp_free_i32(fp0
);
9966 /* XXX: For now we support only a single FPU context. */
9967 gen_helper_1e0i(cfc1
, t0
, rt
);
9969 /* COP2: Not implemented. */
9976 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
9977 gen_store_gpr(t0
, rd
);
9983 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
9984 generate_exception_end(ctx
, EXCP_RI
);
9987 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
9988 int u
, int sel
, int h
)
9990 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9991 TCGv t0
= tcg_temp_local_new();
9993 gen_load_gpr(t0
, rt
);
9994 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9995 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9996 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
9999 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
10000 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
10003 } else if (u
== 0) {
10008 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
10011 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
10021 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
10024 gen_helper_mttc0_tcbind(cpu_env
, t0
);
10027 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
10030 gen_helper_mttc0_tchalt(cpu_env
, t0
);
10033 gen_helper_mttc0_tccontext(cpu_env
, t0
);
10036 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
10039 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
10042 gen_mtc0(ctx
, t0
, rd
, sel
);
10049 gen_helper_mttc0_entryhi(cpu_env
, t0
);
10052 gen_mtc0(ctx
, t0
, rd
, sel
);
10058 gen_helper_mttc0_status(cpu_env
, t0
);
10061 gen_mtc0(ctx
, t0
, rd
, sel
);
10067 gen_helper_mttc0_cause(cpu_env
, t0
);
10077 gen_helper_mttc0_ebase(cpu_env
, t0
);
10087 gen_helper_mttc0_debug(cpu_env
, t0
);
10090 gen_mtc0(ctx
, t0
, rd
, sel
);
10095 gen_mtc0(ctx
, t0
, rd
, sel
);
10097 } else switch (sel
) {
10098 /* GPR registers. */
10100 gen_helper_0e1i(mttgpr
, t0
, rd
);
10102 /* Auxiliary CPU registers */
10106 gen_helper_0e1i(mttlo
, t0
, 0);
10109 gen_helper_0e1i(mtthi
, t0
, 0);
10112 gen_helper_0e1i(mttacx
, t0
, 0);
10115 gen_helper_0e1i(mttlo
, t0
, 1);
10118 gen_helper_0e1i(mtthi
, t0
, 1);
10121 gen_helper_0e1i(mttacx
, t0
, 1);
10124 gen_helper_0e1i(mttlo
, t0
, 2);
10127 gen_helper_0e1i(mtthi
, t0
, 2);
10130 gen_helper_0e1i(mttacx
, t0
, 2);
10133 gen_helper_0e1i(mttlo
, t0
, 3);
10136 gen_helper_0e1i(mtthi
, t0
, 3);
10139 gen_helper_0e1i(mttacx
, t0
, 3);
10142 gen_helper_mttdsp(cpu_env
, t0
);
10148 /* Floating point (COP1). */
10150 /* XXX: For now we support only a single FPU context. */
10152 TCGv_i32 fp0
= tcg_temp_new_i32();
10154 tcg_gen_trunc_tl_i32(fp0
, t0
);
10155 gen_store_fpr32(ctx
, fp0
, rd
);
10156 tcg_temp_free_i32(fp0
);
10158 TCGv_i32 fp0
= tcg_temp_new_i32();
10160 tcg_gen_trunc_tl_i32(fp0
, t0
);
10161 gen_store_fpr32h(ctx
, fp0
, rd
);
10162 tcg_temp_free_i32(fp0
);
10166 /* XXX: For now we support only a single FPU context. */
10168 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
10170 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10171 tcg_temp_free_i32(fs_tmp
);
10173 /* Stop translation as we may have changed hflags */
10174 ctx
->base
.is_jmp
= DISAS_STOP
;
10176 /* COP2: Not implemented. */
10183 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
10189 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
10190 generate_exception_end(ctx
, EXCP_RI
);
10193 static void gen_cp0(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
10196 const char *opn
= "ldst";
10198 check_cp0_enabled(ctx
);
10202 /* Treat as NOP. */
10205 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10210 TCGv t0
= tcg_temp_new();
10212 gen_load_gpr(t0
, rt
);
10213 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10218 #if defined(TARGET_MIPS64)
10220 check_insn(ctx
, ISA_MIPS3
);
10222 /* Treat as NOP. */
10225 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10229 check_insn(ctx
, ISA_MIPS3
);
10231 TCGv t0
= tcg_temp_new();
10233 gen_load_gpr(t0
, rt
);
10234 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10243 /* Treat as NOP. */
10246 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10252 TCGv t0
= tcg_temp_new();
10253 gen_load_gpr(t0
, rt
);
10254 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10260 check_cp0_enabled(ctx
);
10262 /* Treat as NOP. */
10265 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10266 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10270 check_cp0_enabled(ctx
);
10271 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10272 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10277 if (!env
->tlb
->helper_tlbwi
) {
10280 gen_helper_tlbwi(cpu_env
);
10284 if (ctx
->ie
>= 2) {
10285 if (!env
->tlb
->helper_tlbinv
) {
10288 gen_helper_tlbinv(cpu_env
);
10289 } /* treat as nop if TLBINV not supported */
10293 if (ctx
->ie
>= 2) {
10294 if (!env
->tlb
->helper_tlbinvf
) {
10297 gen_helper_tlbinvf(cpu_env
);
10298 } /* treat as nop if TLBINV not supported */
10302 if (!env
->tlb
->helper_tlbwr
) {
10305 gen_helper_tlbwr(cpu_env
);
10309 if (!env
->tlb
->helper_tlbp
) {
10312 gen_helper_tlbp(cpu_env
);
10316 if (!env
->tlb
->helper_tlbr
) {
10319 gen_helper_tlbr(cpu_env
);
10321 case OPC_ERET
: /* OPC_ERETNC */
10322 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10323 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10326 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10327 if (ctx
->opcode
& (1 << bit_shift
)) {
10330 check_insn(ctx
, ISA_MIPS32R5
);
10331 gen_helper_eretnc(cpu_env
);
10335 check_insn(ctx
, ISA_MIPS2
);
10336 gen_helper_eret(cpu_env
);
10338 ctx
->base
.is_jmp
= DISAS_EXIT
;
10343 check_insn(ctx
, ISA_MIPS32
);
10344 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10345 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10348 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10350 generate_exception_end(ctx
, EXCP_RI
);
10352 gen_helper_deret(cpu_env
);
10353 ctx
->base
.is_jmp
= DISAS_EXIT
;
10358 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
10359 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
10360 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10363 /* If we get an exception, we want to restart at next instruction */
10364 ctx
->base
.pc_next
+= 4;
10365 save_cpu_state(ctx
, 1);
10366 ctx
->base
.pc_next
-= 4;
10367 gen_helper_wait(cpu_env
);
10368 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10373 generate_exception_end(ctx
, EXCP_RI
);
10376 (void)opn
; /* avoid a compiler warning */
10378 #endif /* !CONFIG_USER_ONLY */
10380 /* CP1 Branches (before delay slot) */
/*
 * CP1 (FPU) conditional branches that execute before the delay slot:
 * BC1F/BC1T and their -likely and -any2/-any4 (MIPS-3D paired-single)
 * variants. The branch condition is computed from condition-code bit(s)
 * of FCR31 into the global 'bcond', and the branch target / delay-slot
 * hflags are recorded in the DisasContext for the main loop to act on.
 *
 * NOTE(review): the extraction this was recovered from had dropped the
 * switch header, case labels and gotos; they were reconstructed from the
 * visible operation patterns — confirm against upstream before relying
 * on exact label placement.
 */
static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
                                int32_t cc, int32_t offset)
{
    target_ulong btarget;
    TCGv_i32 t0 = tcg_temp_new_i32();

    /* R6 forbids a branch in a delay/forbidden slot: Reserved Instruction. */
    if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Condition codes other than cc0 only exist from MIPS IV / MIPS32 on. */
    if (cc != 0) {
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
    }

    btarget = ctx->base.pc_next + 4 + offset;

    switch (op) {
    case OPC_BC1F:
        /* Branch if CC bit is 0: invert the bit, keep bit 0. */
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        goto not_likely;
    case OPC_BC1FL:
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_not_i32(t0, t0);
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        goto likely;
    case OPC_BC1T:
        /* Branch if CC bit is 1. */
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
        goto not_likely;
    case OPC_BC1TL:
        tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
        tcg_gen_andi_i32(t0, t0, 1);
        tcg_gen_extu_i32_tl(bcond, t0);
    likely:
        /* "Likely" variants nullify the delay slot when not taken. */
        ctx->hflags |= MIPS_HFLAG_BL;
        break;
    case OPC_BC1FANY2:
        {
            /* Branch if either of CC, CC+1 is 0: NAND of the two bits. */
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc + 1));
            tcg_gen_nand_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        }
        goto not_likely;
    case OPC_BC1TANY2:
        {
            /* Branch if either of CC, CC+1 is 1: OR of the two bits. */
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc + 1));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        }
        goto not_likely;
    case OPC_BC1FANY4:
        {
            /* Branch if any of CC..CC+3 is 0: NAND over the four bits. */
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc + 1));
            tcg_gen_and_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc + 2));
            tcg_gen_and_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc + 3));
            tcg_gen_nand_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        }
        goto not_likely;
    case OPC_BC1TANY4:
        {
            /* Branch if any of CC..CC+3 is 1: OR over the four bits. */
            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc + 1));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc + 2));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc + 3));
            tcg_gen_or_i32(t0, t0, t1);
            tcg_temp_free_i32(t1);
            tcg_gen_andi_i32(t0, t0, 1);
            tcg_gen_extu_i32_tl(bcond, t0);
        }
    not_likely:
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    default:
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }
    /* Record target and 32-bit delay-slot size for the translation loop. */
    ctx->btarget = btarget;
    ctx->hflags |= MIPS_HFLAG_BDS32;
out:
    tcg_temp_free_i32(t0);
}
10488 /* R6 CP1 Branches */
/*
 * R6 CP1 branches (BC1EQZ / BC1NEZ): branch on bit 0 of FPR ft being
 * zero / non-zero. Unlike pre-R6 CP1 branches these read the FPR
 * directly rather than an FCR31 condition code. The condition lands in
 * the global 'bcond'; target and delay-slot size are recorded in ctx.
 *
 * NOTE(review): switch headers, case labels and the cleanup label were
 * dropped by the extraction this was recovered from and have been
 * reconstructed; confirm against upstream.
 */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    /* A branch inside a delay / forbidden slot is a Reserved Instruction. */
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->base.pc_next);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    /* Condition is bit 0 of the 64-bit FPR ft. */
    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);

    switch (op) {
    case OPC_BC1EQZ:
        /* Branch if bit is zero: invert it. */
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    case OPC_BC1NEZ:
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    default:
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    /* Record the delay-slot width (16-bit for compressed encodings). */
    switch (delayslot_size) {
    case 2:
        ctx->hflags |= MIPS_HFLAG_BDS16;
        break;
    case 4:
        ctx->hflags |= MIPS_HFLAG_BDS32;
        break;
    }
out:
    tcg_temp_free_i64(t0);
}
10542 /* Coprocessor 1 (FPU) */
10544 #define FOP(func, fmt) (((fmt) << 21) | (func))
10547 OPC_ADD_S
= FOP(0, FMT_S
),
10548 OPC_SUB_S
= FOP(1, FMT_S
),
10549 OPC_MUL_S
= FOP(2, FMT_S
),
10550 OPC_DIV_S
= FOP(3, FMT_S
),
10551 OPC_SQRT_S
= FOP(4, FMT_S
),
10552 OPC_ABS_S
= FOP(5, FMT_S
),
10553 OPC_MOV_S
= FOP(6, FMT_S
),
10554 OPC_NEG_S
= FOP(7, FMT_S
),
10555 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10556 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10557 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10558 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10559 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10560 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10561 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10562 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10563 OPC_SEL_S
= FOP(16, FMT_S
),
10564 OPC_MOVCF_S
= FOP(17, FMT_S
),
10565 OPC_MOVZ_S
= FOP(18, FMT_S
),
10566 OPC_MOVN_S
= FOP(19, FMT_S
),
10567 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10568 OPC_RECIP_S
= FOP(21, FMT_S
),
10569 OPC_RSQRT_S
= FOP(22, FMT_S
),
10570 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10571 OPC_MADDF_S
= FOP(24, FMT_S
),
10572 OPC_MSUBF_S
= FOP(25, FMT_S
),
10573 OPC_RINT_S
= FOP(26, FMT_S
),
10574 OPC_CLASS_S
= FOP(27, FMT_S
),
10575 OPC_MIN_S
= FOP(28, FMT_S
),
10576 OPC_RECIP2_S
= FOP(28, FMT_S
),
10577 OPC_MINA_S
= FOP(29, FMT_S
),
10578 OPC_RECIP1_S
= FOP(29, FMT_S
),
10579 OPC_MAX_S
= FOP(30, FMT_S
),
10580 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10581 OPC_MAXA_S
= FOP(31, FMT_S
),
10582 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10583 OPC_CVT_D_S
= FOP(33, FMT_S
),
10584 OPC_CVT_W_S
= FOP(36, FMT_S
),
10585 OPC_CVT_L_S
= FOP(37, FMT_S
),
10586 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10587 OPC_CMP_F_S
= FOP(48, FMT_S
),
10588 OPC_CMP_UN_S
= FOP(49, FMT_S
),
10589 OPC_CMP_EQ_S
= FOP(50, FMT_S
),
10590 OPC_CMP_UEQ_S
= FOP(51, FMT_S
),
10591 OPC_CMP_OLT_S
= FOP(52, FMT_S
),
10592 OPC_CMP_ULT_S
= FOP(53, FMT_S
),
10593 OPC_CMP_OLE_S
= FOP(54, FMT_S
),
10594 OPC_CMP_ULE_S
= FOP(55, FMT_S
),
10595 OPC_CMP_SF_S
= FOP(56, FMT_S
),
10596 OPC_CMP_NGLE_S
= FOP(57, FMT_S
),
10597 OPC_CMP_SEQ_S
= FOP(58, FMT_S
),
10598 OPC_CMP_NGL_S
= FOP(59, FMT_S
),
10599 OPC_CMP_LT_S
= FOP(60, FMT_S
),
10600 OPC_CMP_NGE_S
= FOP(61, FMT_S
),
10601 OPC_CMP_LE_S
= FOP(62, FMT_S
),
10602 OPC_CMP_NGT_S
= FOP(63, FMT_S
),
10604 OPC_ADD_D
= FOP(0, FMT_D
),
10605 OPC_SUB_D
= FOP(1, FMT_D
),
10606 OPC_MUL_D
= FOP(2, FMT_D
),
10607 OPC_DIV_D
= FOP(3, FMT_D
),
10608 OPC_SQRT_D
= FOP(4, FMT_D
),
10609 OPC_ABS_D
= FOP(5, FMT_D
),
10610 OPC_MOV_D
= FOP(6, FMT_D
),
10611 OPC_NEG_D
= FOP(7, FMT_D
),
10612 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10613 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10614 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10615 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10616 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10617 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10618 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10619 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10620 OPC_SEL_D
= FOP(16, FMT_D
),
10621 OPC_MOVCF_D
= FOP(17, FMT_D
),
10622 OPC_MOVZ_D
= FOP(18, FMT_D
),
10623 OPC_MOVN_D
= FOP(19, FMT_D
),
10624 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10625 OPC_RECIP_D
= FOP(21, FMT_D
),
10626 OPC_RSQRT_D
= FOP(22, FMT_D
),
10627 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10628 OPC_MADDF_D
= FOP(24, FMT_D
),
10629 OPC_MSUBF_D
= FOP(25, FMT_D
),
10630 OPC_RINT_D
= FOP(26, FMT_D
),
10631 OPC_CLASS_D
= FOP(27, FMT_D
),
10632 OPC_MIN_D
= FOP(28, FMT_D
),
10633 OPC_RECIP2_D
= FOP(28, FMT_D
),
10634 OPC_MINA_D
= FOP(29, FMT_D
),
10635 OPC_RECIP1_D
= FOP(29, FMT_D
),
10636 OPC_MAX_D
= FOP(30, FMT_D
),
10637 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10638 OPC_MAXA_D
= FOP(31, FMT_D
),
10639 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10640 OPC_CVT_S_D
= FOP(32, FMT_D
),
10641 OPC_CVT_W_D
= FOP(36, FMT_D
),
10642 OPC_CVT_L_D
= FOP(37, FMT_D
),
10643 OPC_CMP_F_D
= FOP(48, FMT_D
),
10644 OPC_CMP_UN_D
= FOP(49, FMT_D
),
10645 OPC_CMP_EQ_D
= FOP(50, FMT_D
),
10646 OPC_CMP_UEQ_D
= FOP(51, FMT_D
),
10647 OPC_CMP_OLT_D
= FOP(52, FMT_D
),
10648 OPC_CMP_ULT_D
= FOP(53, FMT_D
),
10649 OPC_CMP_OLE_D
= FOP(54, FMT_D
),
10650 OPC_CMP_ULE_D
= FOP(55, FMT_D
),
10651 OPC_CMP_SF_D
= FOP(56, FMT_D
),
10652 OPC_CMP_NGLE_D
= FOP(57, FMT_D
),
10653 OPC_CMP_SEQ_D
= FOP(58, FMT_D
),
10654 OPC_CMP_NGL_D
= FOP(59, FMT_D
),
10655 OPC_CMP_LT_D
= FOP(60, FMT_D
),
10656 OPC_CMP_NGE_D
= FOP(61, FMT_D
),
10657 OPC_CMP_LE_D
= FOP(62, FMT_D
),
10658 OPC_CMP_NGT_D
= FOP(63, FMT_D
),
10660 OPC_CVT_S_W
= FOP(32, FMT_W
),
10661 OPC_CVT_D_W
= FOP(33, FMT_W
),
10662 OPC_CVT_S_L
= FOP(32, FMT_L
),
10663 OPC_CVT_D_L
= FOP(33, FMT_L
),
10664 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10666 OPC_ADD_PS
= FOP(0, FMT_PS
),
10667 OPC_SUB_PS
= FOP(1, FMT_PS
),
10668 OPC_MUL_PS
= FOP(2, FMT_PS
),
10669 OPC_DIV_PS
= FOP(3, FMT_PS
),
10670 OPC_ABS_PS
= FOP(5, FMT_PS
),
10671 OPC_MOV_PS
= FOP(6, FMT_PS
),
10672 OPC_NEG_PS
= FOP(7, FMT_PS
),
10673 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10674 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10675 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10676 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10677 OPC_MULR_PS
= FOP(26, FMT_PS
),
10678 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10679 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10680 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10681 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10683 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10684 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10685 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10686 OPC_PLL_PS
= FOP(44, FMT_PS
),
10687 OPC_PLU_PS
= FOP(45, FMT_PS
),
10688 OPC_PUL_PS
= FOP(46, FMT_PS
),
10689 OPC_PUU_PS
= FOP(47, FMT_PS
),
10690 OPC_CMP_F_PS
= FOP(48, FMT_PS
),
10691 OPC_CMP_UN_PS
= FOP(49, FMT_PS
),
10692 OPC_CMP_EQ_PS
= FOP(50, FMT_PS
),
10693 OPC_CMP_UEQ_PS
= FOP(51, FMT_PS
),
10694 OPC_CMP_OLT_PS
= FOP(52, FMT_PS
),
10695 OPC_CMP_ULT_PS
= FOP(53, FMT_PS
),
10696 OPC_CMP_OLE_PS
= FOP(54, FMT_PS
),
10697 OPC_CMP_ULE_PS
= FOP(55, FMT_PS
),
10698 OPC_CMP_SF_PS
= FOP(56, FMT_PS
),
10699 OPC_CMP_NGLE_PS
= FOP(57, FMT_PS
),
10700 OPC_CMP_SEQ_PS
= FOP(58, FMT_PS
),
10701 OPC_CMP_NGL_PS
= FOP(59, FMT_PS
),
10702 OPC_CMP_LT_PS
= FOP(60, FMT_PS
),
10703 OPC_CMP_NGE_PS
= FOP(61, FMT_PS
),
10704 OPC_CMP_LE_PS
= FOP(62, FMT_PS
),
10705 OPC_CMP_NGT_PS
= FOP(63, FMT_PS
),
10709 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10710 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10711 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10712 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10713 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10714 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10715 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10716 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10717 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10718 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10719 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10720 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10721 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10722 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10723 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10724 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10725 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10726 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10727 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10728 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10729 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10730 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10732 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10733 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10734 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10735 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10736 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10737 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10738 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10739 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10740 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10741 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10742 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10743 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10744 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10745 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10746 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10747 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10748 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10749 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10750 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10751 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10752 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10753 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
10756 static void gen_cp1(DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10758 TCGv t0
= tcg_temp_new();
10763 TCGv_i32 fp0
= tcg_temp_new_i32();
10765 gen_load_fpr32(ctx
, fp0
, fs
);
10766 tcg_gen_ext_i32_tl(t0
, fp0
);
10767 tcg_temp_free_i32(fp0
);
10769 gen_store_gpr(t0
, rt
);
10772 gen_load_gpr(t0
, rt
);
10774 TCGv_i32 fp0
= tcg_temp_new_i32();
10776 tcg_gen_trunc_tl_i32(fp0
, t0
);
10777 gen_store_fpr32(ctx
, fp0
, fs
);
10778 tcg_temp_free_i32(fp0
);
10782 gen_helper_1e0i(cfc1
, t0
, fs
);
10783 gen_store_gpr(t0
, rt
);
10786 gen_load_gpr(t0
, rt
);
10787 save_cpu_state(ctx
, 0);
10789 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10791 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10792 tcg_temp_free_i32(fs_tmp
);
10794 /* Stop translation as we may have changed hflags */
10795 ctx
->base
.is_jmp
= DISAS_STOP
;
10797 #if defined(TARGET_MIPS64)
10799 gen_load_fpr64(ctx
, t0
, fs
);
10800 gen_store_gpr(t0
, rt
);
10803 gen_load_gpr(t0
, rt
);
10804 gen_store_fpr64(ctx
, t0
, fs
);
10809 TCGv_i32 fp0
= tcg_temp_new_i32();
10811 gen_load_fpr32h(ctx
, fp0
, fs
);
10812 tcg_gen_ext_i32_tl(t0
, fp0
);
10813 tcg_temp_free_i32(fp0
);
10815 gen_store_gpr(t0
, rt
);
10818 gen_load_gpr(t0
, rt
);
10820 TCGv_i32 fp0
= tcg_temp_new_i32();
10822 tcg_gen_trunc_tl_i32(fp0
, t0
);
10823 gen_store_fpr32h(ctx
, fp0
, fs
);
10824 tcg_temp_free_i32(fp0
);
10828 MIPS_INVAL("cp1 move");
10829 generate_exception_end(ctx
, EXCP_RI
);
10837 static void gen_movci(DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10844 /* Treat as NOP. */
10849 cond
= TCG_COND_EQ
;
10851 cond
= TCG_COND_NE
;
10854 l1
= gen_new_label();
10855 t0
= tcg_temp_new_i32();
10856 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10857 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10858 tcg_temp_free_i32(t0
);
10860 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10862 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10867 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10871 TCGv_i32 t0
= tcg_temp_new_i32();
10872 TCGLabel
*l1
= gen_new_label();
10875 cond
= TCG_COND_EQ
;
10877 cond
= TCG_COND_NE
;
10880 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10881 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10882 gen_load_fpr32(ctx
, t0
, fs
);
10883 gen_store_fpr32(ctx
, t0
, fd
);
10885 tcg_temp_free_i32(t0
);
10888 static inline void gen_movcf_d(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10892 TCGv_i32 t0
= tcg_temp_new_i32();
10894 TCGLabel
*l1
= gen_new_label();
10897 cond
= TCG_COND_EQ
;
10899 cond
= TCG_COND_NE
;
10902 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10903 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10904 tcg_temp_free_i32(t0
);
10905 fp0
= tcg_temp_new_i64();
10906 gen_load_fpr64(ctx
, fp0
, fs
);
10907 gen_store_fpr64(ctx
, fp0
, fd
);
10908 tcg_temp_free_i64(fp0
);
10912 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10916 TCGv_i32 t0
= tcg_temp_new_i32();
10917 TCGLabel
*l1
= gen_new_label();
10918 TCGLabel
*l2
= gen_new_label();
10921 cond
= TCG_COND_EQ
;
10923 cond
= TCG_COND_NE
;
10926 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10927 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10928 gen_load_fpr32(ctx
, t0
, fs
);
10929 gen_store_fpr32(ctx
, t0
, fd
);
10932 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
10933 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10934 gen_load_fpr32h(ctx
, t0
, fs
);
10935 gen_store_fpr32h(ctx
, t0
, fd
);
10936 tcg_temp_free_i32(t0
);
10940 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10943 TCGv_i32 t1
= tcg_const_i32(0);
10944 TCGv_i32 fp0
= tcg_temp_new_i32();
10945 TCGv_i32 fp1
= tcg_temp_new_i32();
10946 TCGv_i32 fp2
= tcg_temp_new_i32();
10947 gen_load_fpr32(ctx
, fp0
, fd
);
10948 gen_load_fpr32(ctx
, fp1
, ft
);
10949 gen_load_fpr32(ctx
, fp2
, fs
);
10953 tcg_gen_andi_i32(fp0
, fp0
, 1);
10954 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10957 tcg_gen_andi_i32(fp1
, fp1
, 1);
10958 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10961 tcg_gen_andi_i32(fp1
, fp1
, 1);
10962 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
10965 MIPS_INVAL("gen_sel_s");
10966 generate_exception_end(ctx
, EXCP_RI
);
10970 gen_store_fpr32(ctx
, fp0
, fd
);
10971 tcg_temp_free_i32(fp2
);
10972 tcg_temp_free_i32(fp1
);
10973 tcg_temp_free_i32(fp0
);
10974 tcg_temp_free_i32(t1
);
10977 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10980 TCGv_i64 t1
= tcg_const_i64(0);
10981 TCGv_i64 fp0
= tcg_temp_new_i64();
10982 TCGv_i64 fp1
= tcg_temp_new_i64();
10983 TCGv_i64 fp2
= tcg_temp_new_i64();
10984 gen_load_fpr64(ctx
, fp0
, fd
);
10985 gen_load_fpr64(ctx
, fp1
, ft
);
10986 gen_load_fpr64(ctx
, fp2
, fs
);
10990 tcg_gen_andi_i64(fp0
, fp0
, 1);
10991 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10994 tcg_gen_andi_i64(fp1
, fp1
, 1);
10995 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10998 tcg_gen_andi_i64(fp1
, fp1
, 1);
10999 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
11002 MIPS_INVAL("gen_sel_d");
11003 generate_exception_end(ctx
, EXCP_RI
);
11007 gen_store_fpr64(ctx
, fp0
, fd
);
11008 tcg_temp_free_i64(fp2
);
11009 tcg_temp_free_i64(fp1
);
11010 tcg_temp_free_i64(fp0
);
11011 tcg_temp_free_i64(t1
);
11014 static void gen_farith(DisasContext
*ctx
, enum fopcode op1
,
11015 int ft
, int fs
, int fd
, int cc
)
11017 uint32_t func
= ctx
->opcode
& 0x3f;
11021 TCGv_i32 fp0
= tcg_temp_new_i32();
11022 TCGv_i32 fp1
= tcg_temp_new_i32();
11024 gen_load_fpr32(ctx
, fp0
, fs
);
11025 gen_load_fpr32(ctx
, fp1
, ft
);
11026 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
11027 tcg_temp_free_i32(fp1
);
11028 gen_store_fpr32(ctx
, fp0
, fd
);
11029 tcg_temp_free_i32(fp0
);
11034 TCGv_i32 fp0
= tcg_temp_new_i32();
11035 TCGv_i32 fp1
= tcg_temp_new_i32();
11037 gen_load_fpr32(ctx
, fp0
, fs
);
11038 gen_load_fpr32(ctx
, fp1
, ft
);
11039 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
11040 tcg_temp_free_i32(fp1
);
11041 gen_store_fpr32(ctx
, fp0
, fd
);
11042 tcg_temp_free_i32(fp0
);
11047 TCGv_i32 fp0
= tcg_temp_new_i32();
11048 TCGv_i32 fp1
= tcg_temp_new_i32();
11050 gen_load_fpr32(ctx
, fp0
, fs
);
11051 gen_load_fpr32(ctx
, fp1
, ft
);
11052 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
11053 tcg_temp_free_i32(fp1
);
11054 gen_store_fpr32(ctx
, fp0
, fd
);
11055 tcg_temp_free_i32(fp0
);
11060 TCGv_i32 fp0
= tcg_temp_new_i32();
11061 TCGv_i32 fp1
= tcg_temp_new_i32();
11063 gen_load_fpr32(ctx
, fp0
, fs
);
11064 gen_load_fpr32(ctx
, fp1
, ft
);
11065 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
11066 tcg_temp_free_i32(fp1
);
11067 gen_store_fpr32(ctx
, fp0
, fd
);
11068 tcg_temp_free_i32(fp0
);
11073 TCGv_i32 fp0
= tcg_temp_new_i32();
11075 gen_load_fpr32(ctx
, fp0
, fs
);
11076 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
11077 gen_store_fpr32(ctx
, fp0
, fd
);
11078 tcg_temp_free_i32(fp0
);
11083 TCGv_i32 fp0
= tcg_temp_new_i32();
11085 gen_load_fpr32(ctx
, fp0
, fs
);
11086 if (ctx
->abs2008
) {
11087 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
11089 gen_helper_float_abs_s(fp0
, fp0
);
11091 gen_store_fpr32(ctx
, fp0
, fd
);
11092 tcg_temp_free_i32(fp0
);
11097 TCGv_i32 fp0
= tcg_temp_new_i32();
11099 gen_load_fpr32(ctx
, fp0
, fs
);
11100 gen_store_fpr32(ctx
, fp0
, fd
);
11101 tcg_temp_free_i32(fp0
);
11106 TCGv_i32 fp0
= tcg_temp_new_i32();
11108 gen_load_fpr32(ctx
, fp0
, fs
);
11109 if (ctx
->abs2008
) {
11110 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
11112 gen_helper_float_chs_s(fp0
, fp0
);
11114 gen_store_fpr32(ctx
, fp0
, fd
);
11115 tcg_temp_free_i32(fp0
);
11118 case OPC_ROUND_L_S
:
11119 check_cp1_64bitmode(ctx
);
11121 TCGv_i32 fp32
= tcg_temp_new_i32();
11122 TCGv_i64 fp64
= tcg_temp_new_i64();
11124 gen_load_fpr32(ctx
, fp32
, fs
);
11125 if (ctx
->nan2008
) {
11126 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
11128 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
11130 tcg_temp_free_i32(fp32
);
11131 gen_store_fpr64(ctx
, fp64
, fd
);
11132 tcg_temp_free_i64(fp64
);
11135 case OPC_TRUNC_L_S
:
11136 check_cp1_64bitmode(ctx
);
11138 TCGv_i32 fp32
= tcg_temp_new_i32();
11139 TCGv_i64 fp64
= tcg_temp_new_i64();
11141 gen_load_fpr32(ctx
, fp32
, fs
);
11142 if (ctx
->nan2008
) {
11143 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
11145 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
11147 tcg_temp_free_i32(fp32
);
11148 gen_store_fpr64(ctx
, fp64
, fd
);
11149 tcg_temp_free_i64(fp64
);
11153 check_cp1_64bitmode(ctx
);
11155 TCGv_i32 fp32
= tcg_temp_new_i32();
11156 TCGv_i64 fp64
= tcg_temp_new_i64();
11158 gen_load_fpr32(ctx
, fp32
, fs
);
11159 if (ctx
->nan2008
) {
11160 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
11162 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
11164 tcg_temp_free_i32(fp32
);
11165 gen_store_fpr64(ctx
, fp64
, fd
);
11166 tcg_temp_free_i64(fp64
);
11169 case OPC_FLOOR_L_S
:
11170 check_cp1_64bitmode(ctx
);
11172 TCGv_i32 fp32
= tcg_temp_new_i32();
11173 TCGv_i64 fp64
= tcg_temp_new_i64();
11175 gen_load_fpr32(ctx
, fp32
, fs
);
11176 if (ctx
->nan2008
) {
11177 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
11179 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
11181 tcg_temp_free_i32(fp32
);
11182 gen_store_fpr64(ctx
, fp64
, fd
);
11183 tcg_temp_free_i64(fp64
);
11186 case OPC_ROUND_W_S
:
11188 TCGv_i32 fp0
= tcg_temp_new_i32();
11190 gen_load_fpr32(ctx
, fp0
, fs
);
11191 if (ctx
->nan2008
) {
11192 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
11194 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
11196 gen_store_fpr32(ctx
, fp0
, fd
);
11197 tcg_temp_free_i32(fp0
);
11200 case OPC_TRUNC_W_S
:
11202 TCGv_i32 fp0
= tcg_temp_new_i32();
11204 gen_load_fpr32(ctx
, fp0
, fs
);
11205 if (ctx
->nan2008
) {
11206 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
11208 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11210 gen_store_fpr32(ctx
, fp0
, fd
);
11211 tcg_temp_free_i32(fp0
);
11216 TCGv_i32 fp0
= tcg_temp_new_i32();
11218 gen_load_fpr32(ctx
, fp0
, fs
);
11219 if (ctx
->nan2008
) {
11220 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11222 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11224 gen_store_fpr32(ctx
, fp0
, fd
);
11225 tcg_temp_free_i32(fp0
);
11228 case OPC_FLOOR_W_S
:
11230 TCGv_i32 fp0
= tcg_temp_new_i32();
11232 gen_load_fpr32(ctx
, fp0
, fs
);
11233 if (ctx
->nan2008
) {
11234 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11236 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11238 gen_store_fpr32(ctx
, fp0
, fd
);
11239 tcg_temp_free_i32(fp0
);
11243 check_insn(ctx
, ISA_MIPS32R6
);
11244 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11247 check_insn(ctx
, ISA_MIPS32R6
);
11248 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11251 check_insn(ctx
, ISA_MIPS32R6
);
11252 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11255 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11256 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11259 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11261 TCGLabel
*l1
= gen_new_label();
11265 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11267 fp0
= tcg_temp_new_i32();
11268 gen_load_fpr32(ctx
, fp0
, fs
);
11269 gen_store_fpr32(ctx
, fp0
, fd
);
11270 tcg_temp_free_i32(fp0
);
11275 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11277 TCGLabel
*l1
= gen_new_label();
11281 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11282 fp0
= tcg_temp_new_i32();
11283 gen_load_fpr32(ctx
, fp0
, fs
);
11284 gen_store_fpr32(ctx
, fp0
, fd
);
11285 tcg_temp_free_i32(fp0
);
11292 TCGv_i32 fp0
= tcg_temp_new_i32();
11294 gen_load_fpr32(ctx
, fp0
, fs
);
11295 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11296 gen_store_fpr32(ctx
, fp0
, fd
);
11297 tcg_temp_free_i32(fp0
);
11302 TCGv_i32 fp0
= tcg_temp_new_i32();
11304 gen_load_fpr32(ctx
, fp0
, fs
);
11305 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11306 gen_store_fpr32(ctx
, fp0
, fd
);
11307 tcg_temp_free_i32(fp0
);
11311 check_insn(ctx
, ISA_MIPS32R6
);
11313 TCGv_i32 fp0
= tcg_temp_new_i32();
11314 TCGv_i32 fp1
= tcg_temp_new_i32();
11315 TCGv_i32 fp2
= tcg_temp_new_i32();
11316 gen_load_fpr32(ctx
, fp0
, fs
);
11317 gen_load_fpr32(ctx
, fp1
, ft
);
11318 gen_load_fpr32(ctx
, fp2
, fd
);
11319 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11320 gen_store_fpr32(ctx
, fp2
, fd
);
11321 tcg_temp_free_i32(fp2
);
11322 tcg_temp_free_i32(fp1
);
11323 tcg_temp_free_i32(fp0
);
11327 check_insn(ctx
, ISA_MIPS32R6
);
11329 TCGv_i32 fp0
= tcg_temp_new_i32();
11330 TCGv_i32 fp1
= tcg_temp_new_i32();
11331 TCGv_i32 fp2
= tcg_temp_new_i32();
11332 gen_load_fpr32(ctx
, fp0
, fs
);
11333 gen_load_fpr32(ctx
, fp1
, ft
);
11334 gen_load_fpr32(ctx
, fp2
, fd
);
11335 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11336 gen_store_fpr32(ctx
, fp2
, fd
);
11337 tcg_temp_free_i32(fp2
);
11338 tcg_temp_free_i32(fp1
);
11339 tcg_temp_free_i32(fp0
);
11343 check_insn(ctx
, ISA_MIPS32R6
);
11345 TCGv_i32 fp0
= tcg_temp_new_i32();
11346 gen_load_fpr32(ctx
, fp0
, fs
);
11347 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11348 gen_store_fpr32(ctx
, fp0
, fd
);
11349 tcg_temp_free_i32(fp0
);
11353 check_insn(ctx
, ISA_MIPS32R6
);
11355 TCGv_i32 fp0
= tcg_temp_new_i32();
11356 gen_load_fpr32(ctx
, fp0
, fs
);
11357 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11358 gen_store_fpr32(ctx
, fp0
, fd
);
11359 tcg_temp_free_i32(fp0
);
11362 case OPC_MIN_S
: /* OPC_RECIP2_S */
11363 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11365 TCGv_i32 fp0
= tcg_temp_new_i32();
11366 TCGv_i32 fp1
= tcg_temp_new_i32();
11367 TCGv_i32 fp2
= tcg_temp_new_i32();
11368 gen_load_fpr32(ctx
, fp0
, fs
);
11369 gen_load_fpr32(ctx
, fp1
, ft
);
11370 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11371 gen_store_fpr32(ctx
, fp2
, fd
);
11372 tcg_temp_free_i32(fp2
);
11373 tcg_temp_free_i32(fp1
);
11374 tcg_temp_free_i32(fp0
);
11377 check_cp1_64bitmode(ctx
);
11379 TCGv_i32 fp0
= tcg_temp_new_i32();
11380 TCGv_i32 fp1
= tcg_temp_new_i32();
11382 gen_load_fpr32(ctx
, fp0
, fs
);
11383 gen_load_fpr32(ctx
, fp1
, ft
);
11384 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11385 tcg_temp_free_i32(fp1
);
11386 gen_store_fpr32(ctx
, fp0
, fd
);
11387 tcg_temp_free_i32(fp0
);
11391 case OPC_MINA_S
: /* OPC_RECIP1_S */
11392 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11394 TCGv_i32 fp0
= tcg_temp_new_i32();
11395 TCGv_i32 fp1
= tcg_temp_new_i32();
11396 TCGv_i32 fp2
= tcg_temp_new_i32();
11397 gen_load_fpr32(ctx
, fp0
, fs
);
11398 gen_load_fpr32(ctx
, fp1
, ft
);
11399 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11400 gen_store_fpr32(ctx
, fp2
, fd
);
11401 tcg_temp_free_i32(fp2
);
11402 tcg_temp_free_i32(fp1
);
11403 tcg_temp_free_i32(fp0
);
11406 check_cp1_64bitmode(ctx
);
11408 TCGv_i32 fp0
= tcg_temp_new_i32();
11410 gen_load_fpr32(ctx
, fp0
, fs
);
11411 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11412 gen_store_fpr32(ctx
, fp0
, fd
);
11413 tcg_temp_free_i32(fp0
);
11417 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11418 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11420 TCGv_i32 fp0
= tcg_temp_new_i32();
11421 TCGv_i32 fp1
= tcg_temp_new_i32();
11422 gen_load_fpr32(ctx
, fp0
, fs
);
11423 gen_load_fpr32(ctx
, fp1
, ft
);
11424 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11425 gen_store_fpr32(ctx
, fp1
, fd
);
11426 tcg_temp_free_i32(fp1
);
11427 tcg_temp_free_i32(fp0
);
11430 check_cp1_64bitmode(ctx
);
11432 TCGv_i32 fp0
= tcg_temp_new_i32();
11434 gen_load_fpr32(ctx
, fp0
, fs
);
11435 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11436 gen_store_fpr32(ctx
, fp0
, fd
);
11437 tcg_temp_free_i32(fp0
);
11441 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11442 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11444 TCGv_i32 fp0
= tcg_temp_new_i32();
11445 TCGv_i32 fp1
= tcg_temp_new_i32();
11446 gen_load_fpr32(ctx
, fp0
, fs
);
11447 gen_load_fpr32(ctx
, fp1
, ft
);
11448 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11449 gen_store_fpr32(ctx
, fp1
, fd
);
11450 tcg_temp_free_i32(fp1
);
11451 tcg_temp_free_i32(fp0
);
11454 check_cp1_64bitmode(ctx
);
11456 TCGv_i32 fp0
= tcg_temp_new_i32();
11457 TCGv_i32 fp1
= tcg_temp_new_i32();
11459 gen_load_fpr32(ctx
, fp0
, fs
);
11460 gen_load_fpr32(ctx
, fp1
, ft
);
11461 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11462 tcg_temp_free_i32(fp1
);
11463 gen_store_fpr32(ctx
, fp0
, fd
);
11464 tcg_temp_free_i32(fp0
);
11469 check_cp1_registers(ctx
, fd
);
11471 TCGv_i32 fp32
= tcg_temp_new_i32();
11472 TCGv_i64 fp64
= tcg_temp_new_i64();
11474 gen_load_fpr32(ctx
, fp32
, fs
);
11475 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11476 tcg_temp_free_i32(fp32
);
11477 gen_store_fpr64(ctx
, fp64
, fd
);
11478 tcg_temp_free_i64(fp64
);
11483 TCGv_i32 fp0
= tcg_temp_new_i32();
11485 gen_load_fpr32(ctx
, fp0
, fs
);
11486 if (ctx
->nan2008
) {
11487 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11489 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11491 gen_store_fpr32(ctx
, fp0
, fd
);
11492 tcg_temp_free_i32(fp0
);
11496 check_cp1_64bitmode(ctx
);
11498 TCGv_i32 fp32
= tcg_temp_new_i32();
11499 TCGv_i64 fp64
= tcg_temp_new_i64();
11501 gen_load_fpr32(ctx
, fp32
, fs
);
11502 if (ctx
->nan2008
) {
11503 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11505 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11507 tcg_temp_free_i32(fp32
);
11508 gen_store_fpr64(ctx
, fp64
, fd
);
11509 tcg_temp_free_i64(fp64
);
11515 TCGv_i64 fp64
= tcg_temp_new_i64();
11516 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11517 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11519 gen_load_fpr32(ctx
, fp32_0
, fs
);
11520 gen_load_fpr32(ctx
, fp32_1
, ft
);
11521 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11522 tcg_temp_free_i32(fp32_1
);
11523 tcg_temp_free_i32(fp32_0
);
11524 gen_store_fpr64(ctx
, fp64
, fd
);
11525 tcg_temp_free_i64(fp64
);
11531 case OPC_CMP_UEQ_S
:
11532 case OPC_CMP_OLT_S
:
11533 case OPC_CMP_ULT_S
:
11534 case OPC_CMP_OLE_S
:
11535 case OPC_CMP_ULE_S
:
11537 case OPC_CMP_NGLE_S
:
11538 case OPC_CMP_SEQ_S
:
11539 case OPC_CMP_NGL_S
:
11541 case OPC_CMP_NGE_S
:
11543 case OPC_CMP_NGT_S
:
11544 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11545 if (ctx
->opcode
& (1 << 6)) {
11546 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
11548 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
11552 check_cp1_registers(ctx
, fs
| ft
| fd
);
11554 TCGv_i64 fp0
= tcg_temp_new_i64();
11555 TCGv_i64 fp1
= tcg_temp_new_i64();
11557 gen_load_fpr64(ctx
, fp0
, fs
);
11558 gen_load_fpr64(ctx
, fp1
, ft
);
11559 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11560 tcg_temp_free_i64(fp1
);
11561 gen_store_fpr64(ctx
, fp0
, fd
);
11562 tcg_temp_free_i64(fp0
);
11566 check_cp1_registers(ctx
, fs
| ft
| fd
);
11568 TCGv_i64 fp0
= tcg_temp_new_i64();
11569 TCGv_i64 fp1
= tcg_temp_new_i64();
11571 gen_load_fpr64(ctx
, fp0
, fs
);
11572 gen_load_fpr64(ctx
, fp1
, ft
);
11573 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11574 tcg_temp_free_i64(fp1
);
11575 gen_store_fpr64(ctx
, fp0
, fd
);
11576 tcg_temp_free_i64(fp0
);
11580 check_cp1_registers(ctx
, fs
| ft
| fd
);
11582 TCGv_i64 fp0
= tcg_temp_new_i64();
11583 TCGv_i64 fp1
= tcg_temp_new_i64();
11585 gen_load_fpr64(ctx
, fp0
, fs
);
11586 gen_load_fpr64(ctx
, fp1
, ft
);
11587 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11588 tcg_temp_free_i64(fp1
);
11589 gen_store_fpr64(ctx
, fp0
, fd
);
11590 tcg_temp_free_i64(fp0
);
11594 check_cp1_registers(ctx
, fs
| ft
| fd
);
11596 TCGv_i64 fp0
= tcg_temp_new_i64();
11597 TCGv_i64 fp1
= tcg_temp_new_i64();
11599 gen_load_fpr64(ctx
, fp0
, fs
);
11600 gen_load_fpr64(ctx
, fp1
, ft
);
11601 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11602 tcg_temp_free_i64(fp1
);
11603 gen_store_fpr64(ctx
, fp0
, fd
);
11604 tcg_temp_free_i64(fp0
);
11608 check_cp1_registers(ctx
, fs
| fd
);
11610 TCGv_i64 fp0
= tcg_temp_new_i64();
11612 gen_load_fpr64(ctx
, fp0
, fs
);
11613 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11614 gen_store_fpr64(ctx
, fp0
, fd
);
11615 tcg_temp_free_i64(fp0
);
11619 check_cp1_registers(ctx
, fs
| fd
);
11621 TCGv_i64 fp0
= tcg_temp_new_i64();
11623 gen_load_fpr64(ctx
, fp0
, fs
);
11624 if (ctx
->abs2008
) {
11625 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11627 gen_helper_float_abs_d(fp0
, fp0
);
11629 gen_store_fpr64(ctx
, fp0
, fd
);
11630 tcg_temp_free_i64(fp0
);
11634 check_cp1_registers(ctx
, fs
| fd
);
11636 TCGv_i64 fp0
= tcg_temp_new_i64();
11638 gen_load_fpr64(ctx
, fp0
, fs
);
11639 gen_store_fpr64(ctx
, fp0
, fd
);
11640 tcg_temp_free_i64(fp0
);
11644 check_cp1_registers(ctx
, fs
| fd
);
11646 TCGv_i64 fp0
= tcg_temp_new_i64();
11648 gen_load_fpr64(ctx
, fp0
, fs
);
11649 if (ctx
->abs2008
) {
11650 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11652 gen_helper_float_chs_d(fp0
, fp0
);
11654 gen_store_fpr64(ctx
, fp0
, fd
);
11655 tcg_temp_free_i64(fp0
);
11658 case OPC_ROUND_L_D
:
11659 check_cp1_64bitmode(ctx
);
11661 TCGv_i64 fp0
= tcg_temp_new_i64();
11663 gen_load_fpr64(ctx
, fp0
, fs
);
11664 if (ctx
->nan2008
) {
11665 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11667 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11669 gen_store_fpr64(ctx
, fp0
, fd
);
11670 tcg_temp_free_i64(fp0
);
11673 case OPC_TRUNC_L_D
:
11674 check_cp1_64bitmode(ctx
);
11676 TCGv_i64 fp0
= tcg_temp_new_i64();
11678 gen_load_fpr64(ctx
, fp0
, fs
);
11679 if (ctx
->nan2008
) {
11680 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11682 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11684 gen_store_fpr64(ctx
, fp0
, fd
);
11685 tcg_temp_free_i64(fp0
);
11689 check_cp1_64bitmode(ctx
);
11691 TCGv_i64 fp0
= tcg_temp_new_i64();
11693 gen_load_fpr64(ctx
, fp0
, fs
);
11694 if (ctx
->nan2008
) {
11695 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11697 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11699 gen_store_fpr64(ctx
, fp0
, fd
);
11700 tcg_temp_free_i64(fp0
);
11703 case OPC_FLOOR_L_D
:
11704 check_cp1_64bitmode(ctx
);
11706 TCGv_i64 fp0
= tcg_temp_new_i64();
11708 gen_load_fpr64(ctx
, fp0
, fs
);
11709 if (ctx
->nan2008
) {
11710 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11712 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11714 gen_store_fpr64(ctx
, fp0
, fd
);
11715 tcg_temp_free_i64(fp0
);
11718 case OPC_ROUND_W_D
:
11719 check_cp1_registers(ctx
, fs
);
11721 TCGv_i32 fp32
= tcg_temp_new_i32();
11722 TCGv_i64 fp64
= tcg_temp_new_i64();
11724 gen_load_fpr64(ctx
, fp64
, fs
);
11725 if (ctx
->nan2008
) {
11726 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11728 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11730 tcg_temp_free_i64(fp64
);
11731 gen_store_fpr32(ctx
, fp32
, fd
);
11732 tcg_temp_free_i32(fp32
);
11735 case OPC_TRUNC_W_D
:
11736 check_cp1_registers(ctx
, fs
);
11738 TCGv_i32 fp32
= tcg_temp_new_i32();
11739 TCGv_i64 fp64
= tcg_temp_new_i64();
11741 gen_load_fpr64(ctx
, fp64
, fs
);
11742 if (ctx
->nan2008
) {
11743 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11745 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11747 tcg_temp_free_i64(fp64
);
11748 gen_store_fpr32(ctx
, fp32
, fd
);
11749 tcg_temp_free_i32(fp32
);
11753 check_cp1_registers(ctx
, fs
);
11755 TCGv_i32 fp32
= tcg_temp_new_i32();
11756 TCGv_i64 fp64
= tcg_temp_new_i64();
11758 gen_load_fpr64(ctx
, fp64
, fs
);
11759 if (ctx
->nan2008
) {
11760 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11762 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11764 tcg_temp_free_i64(fp64
);
11765 gen_store_fpr32(ctx
, fp32
, fd
);
11766 tcg_temp_free_i32(fp32
);
11769 case OPC_FLOOR_W_D
:
11770 check_cp1_registers(ctx
, fs
);
11772 TCGv_i32 fp32
= tcg_temp_new_i32();
11773 TCGv_i64 fp64
= tcg_temp_new_i64();
11775 gen_load_fpr64(ctx
, fp64
, fs
);
11776 if (ctx
->nan2008
) {
11777 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11779 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11781 tcg_temp_free_i64(fp64
);
11782 gen_store_fpr32(ctx
, fp32
, fd
);
11783 tcg_temp_free_i32(fp32
);
11787 check_insn(ctx
, ISA_MIPS32R6
);
11788 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11791 check_insn(ctx
, ISA_MIPS32R6
);
11792 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11795 check_insn(ctx
, ISA_MIPS32R6
);
11796 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11799 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11800 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11803 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11805 TCGLabel
*l1
= gen_new_label();
11809 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11811 fp0
= tcg_temp_new_i64();
11812 gen_load_fpr64(ctx
, fp0
, fs
);
11813 gen_store_fpr64(ctx
, fp0
, fd
);
11814 tcg_temp_free_i64(fp0
);
11819 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
11821 TCGLabel
*l1
= gen_new_label();
11825 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11826 fp0
= tcg_temp_new_i64();
11827 gen_load_fpr64(ctx
, fp0
, fs
);
11828 gen_store_fpr64(ctx
, fp0
, fd
);
11829 tcg_temp_free_i64(fp0
);
11835 check_cp1_registers(ctx
, fs
| fd
);
11837 TCGv_i64 fp0
= tcg_temp_new_i64();
11839 gen_load_fpr64(ctx
, fp0
, fs
);
11840 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11841 gen_store_fpr64(ctx
, fp0
, fd
);
11842 tcg_temp_free_i64(fp0
);
11846 check_cp1_registers(ctx
, fs
| fd
);
11848 TCGv_i64 fp0
= tcg_temp_new_i64();
11850 gen_load_fpr64(ctx
, fp0
, fs
);
11851 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11852 gen_store_fpr64(ctx
, fp0
, fd
);
11853 tcg_temp_free_i64(fp0
);
11857 check_insn(ctx
, ISA_MIPS32R6
);
11859 TCGv_i64 fp0
= tcg_temp_new_i64();
11860 TCGv_i64 fp1
= tcg_temp_new_i64();
11861 TCGv_i64 fp2
= tcg_temp_new_i64();
11862 gen_load_fpr64(ctx
, fp0
, fs
);
11863 gen_load_fpr64(ctx
, fp1
, ft
);
11864 gen_load_fpr64(ctx
, fp2
, fd
);
11865 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11866 gen_store_fpr64(ctx
, fp2
, fd
);
11867 tcg_temp_free_i64(fp2
);
11868 tcg_temp_free_i64(fp1
);
11869 tcg_temp_free_i64(fp0
);
11873 check_insn(ctx
, ISA_MIPS32R6
);
11875 TCGv_i64 fp0
= tcg_temp_new_i64();
11876 TCGv_i64 fp1
= tcg_temp_new_i64();
11877 TCGv_i64 fp2
= tcg_temp_new_i64();
11878 gen_load_fpr64(ctx
, fp0
, fs
);
11879 gen_load_fpr64(ctx
, fp1
, ft
);
11880 gen_load_fpr64(ctx
, fp2
, fd
);
11881 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11882 gen_store_fpr64(ctx
, fp2
, fd
);
11883 tcg_temp_free_i64(fp2
);
11884 tcg_temp_free_i64(fp1
);
11885 tcg_temp_free_i64(fp0
);
11889 check_insn(ctx
, ISA_MIPS32R6
);
11891 TCGv_i64 fp0
= tcg_temp_new_i64();
11892 gen_load_fpr64(ctx
, fp0
, fs
);
11893 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11894 gen_store_fpr64(ctx
, fp0
, fd
);
11895 tcg_temp_free_i64(fp0
);
11899 check_insn(ctx
, ISA_MIPS32R6
);
11901 TCGv_i64 fp0
= tcg_temp_new_i64();
11902 gen_load_fpr64(ctx
, fp0
, fs
);
11903 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11904 gen_store_fpr64(ctx
, fp0
, fd
);
11905 tcg_temp_free_i64(fp0
);
11908 case OPC_MIN_D
: /* OPC_RECIP2_D */
11909 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11911 TCGv_i64 fp0
= tcg_temp_new_i64();
11912 TCGv_i64 fp1
= tcg_temp_new_i64();
11913 gen_load_fpr64(ctx
, fp0
, fs
);
11914 gen_load_fpr64(ctx
, fp1
, ft
);
11915 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11916 gen_store_fpr64(ctx
, fp1
, fd
);
11917 tcg_temp_free_i64(fp1
);
11918 tcg_temp_free_i64(fp0
);
11921 check_cp1_64bitmode(ctx
);
11923 TCGv_i64 fp0
= tcg_temp_new_i64();
11924 TCGv_i64 fp1
= tcg_temp_new_i64();
11926 gen_load_fpr64(ctx
, fp0
, fs
);
11927 gen_load_fpr64(ctx
, fp1
, ft
);
11928 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11929 tcg_temp_free_i64(fp1
);
11930 gen_store_fpr64(ctx
, fp0
, fd
);
11931 tcg_temp_free_i64(fp0
);
11935 case OPC_MINA_D
: /* OPC_RECIP1_D */
11936 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11938 TCGv_i64 fp0
= tcg_temp_new_i64();
11939 TCGv_i64 fp1
= tcg_temp_new_i64();
11940 gen_load_fpr64(ctx
, fp0
, fs
);
11941 gen_load_fpr64(ctx
, fp1
, ft
);
11942 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11943 gen_store_fpr64(ctx
, fp1
, fd
);
11944 tcg_temp_free_i64(fp1
);
11945 tcg_temp_free_i64(fp0
);
11948 check_cp1_64bitmode(ctx
);
11950 TCGv_i64 fp0
= tcg_temp_new_i64();
11952 gen_load_fpr64(ctx
, fp0
, fs
);
11953 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11954 gen_store_fpr64(ctx
, fp0
, fd
);
11955 tcg_temp_free_i64(fp0
);
11959 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11960 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11962 TCGv_i64 fp0
= tcg_temp_new_i64();
11963 TCGv_i64 fp1
= tcg_temp_new_i64();
11964 gen_load_fpr64(ctx
, fp0
, fs
);
11965 gen_load_fpr64(ctx
, fp1
, ft
);
11966 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
11967 gen_store_fpr64(ctx
, fp1
, fd
);
11968 tcg_temp_free_i64(fp1
);
11969 tcg_temp_free_i64(fp0
);
11972 check_cp1_64bitmode(ctx
);
11974 TCGv_i64 fp0
= tcg_temp_new_i64();
11976 gen_load_fpr64(ctx
, fp0
, fs
);
11977 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
11978 gen_store_fpr64(ctx
, fp0
, fd
);
11979 tcg_temp_free_i64(fp0
);
11983 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
11984 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
11986 TCGv_i64 fp0
= tcg_temp_new_i64();
11987 TCGv_i64 fp1
= tcg_temp_new_i64();
11988 gen_load_fpr64(ctx
, fp0
, fs
);
11989 gen_load_fpr64(ctx
, fp1
, ft
);
11990 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
11991 gen_store_fpr64(ctx
, fp1
, fd
);
11992 tcg_temp_free_i64(fp1
);
11993 tcg_temp_free_i64(fp0
);
11996 check_cp1_64bitmode(ctx
);
11998 TCGv_i64 fp0
= tcg_temp_new_i64();
11999 TCGv_i64 fp1
= tcg_temp_new_i64();
12001 gen_load_fpr64(ctx
, fp0
, fs
);
12002 gen_load_fpr64(ctx
, fp1
, ft
);
12003 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
12004 tcg_temp_free_i64(fp1
);
12005 gen_store_fpr64(ctx
, fp0
, fd
);
12006 tcg_temp_free_i64(fp0
);
12013 case OPC_CMP_UEQ_D
:
12014 case OPC_CMP_OLT_D
:
12015 case OPC_CMP_ULT_D
:
12016 case OPC_CMP_OLE_D
:
12017 case OPC_CMP_ULE_D
:
12019 case OPC_CMP_NGLE_D
:
12020 case OPC_CMP_SEQ_D
:
12021 case OPC_CMP_NGL_D
:
12023 case OPC_CMP_NGE_D
:
12025 case OPC_CMP_NGT_D
:
12026 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12027 if (ctx
->opcode
& (1 << 6)) {
12028 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
12030 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
12034 check_cp1_registers(ctx
, fs
);
12036 TCGv_i32 fp32
= tcg_temp_new_i32();
12037 TCGv_i64 fp64
= tcg_temp_new_i64();
12039 gen_load_fpr64(ctx
, fp64
, fs
);
12040 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
12041 tcg_temp_free_i64(fp64
);
12042 gen_store_fpr32(ctx
, fp32
, fd
);
12043 tcg_temp_free_i32(fp32
);
12047 check_cp1_registers(ctx
, fs
);
12049 TCGv_i32 fp32
= tcg_temp_new_i32();
12050 TCGv_i64 fp64
= tcg_temp_new_i64();
12052 gen_load_fpr64(ctx
, fp64
, fs
);
12053 if (ctx
->nan2008
) {
12054 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
12056 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
12058 tcg_temp_free_i64(fp64
);
12059 gen_store_fpr32(ctx
, fp32
, fd
);
12060 tcg_temp_free_i32(fp32
);
12064 check_cp1_64bitmode(ctx
);
12066 TCGv_i64 fp0
= tcg_temp_new_i64();
12068 gen_load_fpr64(ctx
, fp0
, fs
);
12069 if (ctx
->nan2008
) {
12070 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
12072 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
12074 gen_store_fpr64(ctx
, fp0
, fd
);
12075 tcg_temp_free_i64(fp0
);
12080 TCGv_i32 fp0
= tcg_temp_new_i32();
12082 gen_load_fpr32(ctx
, fp0
, fs
);
12083 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
12084 gen_store_fpr32(ctx
, fp0
, fd
);
12085 tcg_temp_free_i32(fp0
);
12089 check_cp1_registers(ctx
, fd
);
12091 TCGv_i32 fp32
= tcg_temp_new_i32();
12092 TCGv_i64 fp64
= tcg_temp_new_i64();
12094 gen_load_fpr32(ctx
, fp32
, fs
);
12095 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
12096 tcg_temp_free_i32(fp32
);
12097 gen_store_fpr64(ctx
, fp64
, fd
);
12098 tcg_temp_free_i64(fp64
);
12102 check_cp1_64bitmode(ctx
);
12104 TCGv_i32 fp32
= tcg_temp_new_i32();
12105 TCGv_i64 fp64
= tcg_temp_new_i64();
12107 gen_load_fpr64(ctx
, fp64
, fs
);
12108 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
12109 tcg_temp_free_i64(fp64
);
12110 gen_store_fpr32(ctx
, fp32
, fd
);
12111 tcg_temp_free_i32(fp32
);
12115 check_cp1_64bitmode(ctx
);
12117 TCGv_i64 fp0
= tcg_temp_new_i64();
12119 gen_load_fpr64(ctx
, fp0
, fs
);
12120 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
12121 gen_store_fpr64(ctx
, fp0
, fd
);
12122 tcg_temp_free_i64(fp0
);
12125 case OPC_CVT_PS_PW
:
12128 TCGv_i64 fp0
= tcg_temp_new_i64();
12130 gen_load_fpr64(ctx
, fp0
, fs
);
12131 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
12132 gen_store_fpr64(ctx
, fp0
, fd
);
12133 tcg_temp_free_i64(fp0
);
12139 TCGv_i64 fp0
= tcg_temp_new_i64();
12140 TCGv_i64 fp1
= tcg_temp_new_i64();
12142 gen_load_fpr64(ctx
, fp0
, fs
);
12143 gen_load_fpr64(ctx
, fp1
, ft
);
12144 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
12145 tcg_temp_free_i64(fp1
);
12146 gen_store_fpr64(ctx
, fp0
, fd
);
12147 tcg_temp_free_i64(fp0
);
12153 TCGv_i64 fp0
= tcg_temp_new_i64();
12154 TCGv_i64 fp1
= tcg_temp_new_i64();
12156 gen_load_fpr64(ctx
, fp0
, fs
);
12157 gen_load_fpr64(ctx
, fp1
, ft
);
12158 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
12159 tcg_temp_free_i64(fp1
);
12160 gen_store_fpr64(ctx
, fp0
, fd
);
12161 tcg_temp_free_i64(fp0
);
12167 TCGv_i64 fp0
= tcg_temp_new_i64();
12168 TCGv_i64 fp1
= tcg_temp_new_i64();
12170 gen_load_fpr64(ctx
, fp0
, fs
);
12171 gen_load_fpr64(ctx
, fp1
, ft
);
12172 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
12173 tcg_temp_free_i64(fp1
);
12174 gen_store_fpr64(ctx
, fp0
, fd
);
12175 tcg_temp_free_i64(fp0
);
12181 TCGv_i64 fp0
= tcg_temp_new_i64();
12183 gen_load_fpr64(ctx
, fp0
, fs
);
12184 gen_helper_float_abs_ps(fp0
, fp0
);
12185 gen_store_fpr64(ctx
, fp0
, fd
);
12186 tcg_temp_free_i64(fp0
);
12192 TCGv_i64 fp0
= tcg_temp_new_i64();
12194 gen_load_fpr64(ctx
, fp0
, fs
);
12195 gen_store_fpr64(ctx
, fp0
, fd
);
12196 tcg_temp_free_i64(fp0
);
12202 TCGv_i64 fp0
= tcg_temp_new_i64();
12204 gen_load_fpr64(ctx
, fp0
, fs
);
12205 gen_helper_float_chs_ps(fp0
, fp0
);
12206 gen_store_fpr64(ctx
, fp0
, fd
);
12207 tcg_temp_free_i64(fp0
);
12212 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12217 TCGLabel
*l1
= gen_new_label();
12221 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12223 fp0
= tcg_temp_new_i64();
12224 gen_load_fpr64(ctx
, fp0
, fs
);
12225 gen_store_fpr64(ctx
, fp0
, fd
);
12226 tcg_temp_free_i64(fp0
);
12233 TCGLabel
*l1
= gen_new_label();
12237 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12238 fp0
= tcg_temp_new_i64();
12239 gen_load_fpr64(ctx
, fp0
, fs
);
12240 gen_store_fpr64(ctx
, fp0
, fd
);
12241 tcg_temp_free_i64(fp0
);
12249 TCGv_i64 fp0
= tcg_temp_new_i64();
12250 TCGv_i64 fp1
= tcg_temp_new_i64();
12252 gen_load_fpr64(ctx
, fp0
, ft
);
12253 gen_load_fpr64(ctx
, fp1
, fs
);
12254 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12255 tcg_temp_free_i64(fp1
);
12256 gen_store_fpr64(ctx
, fp0
, fd
);
12257 tcg_temp_free_i64(fp0
);
12263 TCGv_i64 fp0
= tcg_temp_new_i64();
12264 TCGv_i64 fp1
= tcg_temp_new_i64();
12266 gen_load_fpr64(ctx
, fp0
, ft
);
12267 gen_load_fpr64(ctx
, fp1
, fs
);
12268 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12269 tcg_temp_free_i64(fp1
);
12270 gen_store_fpr64(ctx
, fp0
, fd
);
12271 tcg_temp_free_i64(fp0
);
12274 case OPC_RECIP2_PS
:
12277 TCGv_i64 fp0
= tcg_temp_new_i64();
12278 TCGv_i64 fp1
= tcg_temp_new_i64();
12280 gen_load_fpr64(ctx
, fp0
, fs
);
12281 gen_load_fpr64(ctx
, fp1
, ft
);
12282 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12283 tcg_temp_free_i64(fp1
);
12284 gen_store_fpr64(ctx
, fp0
, fd
);
12285 tcg_temp_free_i64(fp0
);
12288 case OPC_RECIP1_PS
:
12291 TCGv_i64 fp0
= tcg_temp_new_i64();
12293 gen_load_fpr64(ctx
, fp0
, fs
);
12294 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12295 gen_store_fpr64(ctx
, fp0
, fd
);
12296 tcg_temp_free_i64(fp0
);
12299 case OPC_RSQRT1_PS
:
12302 TCGv_i64 fp0
= tcg_temp_new_i64();
12304 gen_load_fpr64(ctx
, fp0
, fs
);
12305 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12306 gen_store_fpr64(ctx
, fp0
, fd
);
12307 tcg_temp_free_i64(fp0
);
12310 case OPC_RSQRT2_PS
:
12313 TCGv_i64 fp0
= tcg_temp_new_i64();
12314 TCGv_i64 fp1
= tcg_temp_new_i64();
12316 gen_load_fpr64(ctx
, fp0
, fs
);
12317 gen_load_fpr64(ctx
, fp1
, ft
);
12318 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12319 tcg_temp_free_i64(fp1
);
12320 gen_store_fpr64(ctx
, fp0
, fd
);
12321 tcg_temp_free_i64(fp0
);
12325 check_cp1_64bitmode(ctx
);
12327 TCGv_i32 fp0
= tcg_temp_new_i32();
12329 gen_load_fpr32h(ctx
, fp0
, fs
);
12330 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12331 gen_store_fpr32(ctx
, fp0
, fd
);
12332 tcg_temp_free_i32(fp0
);
12335 case OPC_CVT_PW_PS
:
12338 TCGv_i64 fp0
= tcg_temp_new_i64();
12340 gen_load_fpr64(ctx
, fp0
, fs
);
12341 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12342 gen_store_fpr64(ctx
, fp0
, fd
);
12343 tcg_temp_free_i64(fp0
);
12347 check_cp1_64bitmode(ctx
);
12349 TCGv_i32 fp0
= tcg_temp_new_i32();
12351 gen_load_fpr32(ctx
, fp0
, fs
);
12352 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12353 gen_store_fpr32(ctx
, fp0
, fd
);
12354 tcg_temp_free_i32(fp0
);
12360 TCGv_i32 fp0
= tcg_temp_new_i32();
12361 TCGv_i32 fp1
= tcg_temp_new_i32();
12363 gen_load_fpr32(ctx
, fp0
, fs
);
12364 gen_load_fpr32(ctx
, fp1
, ft
);
12365 gen_store_fpr32h(ctx
, fp0
, fd
);
12366 gen_store_fpr32(ctx
, fp1
, fd
);
12367 tcg_temp_free_i32(fp0
);
12368 tcg_temp_free_i32(fp1
);
12374 TCGv_i32 fp0
= tcg_temp_new_i32();
12375 TCGv_i32 fp1
= tcg_temp_new_i32();
12377 gen_load_fpr32(ctx
, fp0
, fs
);
12378 gen_load_fpr32h(ctx
, fp1
, ft
);
12379 gen_store_fpr32(ctx
, fp1
, fd
);
12380 gen_store_fpr32h(ctx
, fp0
, fd
);
12381 tcg_temp_free_i32(fp0
);
12382 tcg_temp_free_i32(fp1
);
12388 TCGv_i32 fp0
= tcg_temp_new_i32();
12389 TCGv_i32 fp1
= tcg_temp_new_i32();
12391 gen_load_fpr32h(ctx
, fp0
, fs
);
12392 gen_load_fpr32(ctx
, fp1
, ft
);
12393 gen_store_fpr32(ctx
, fp1
, fd
);
12394 gen_store_fpr32h(ctx
, fp0
, fd
);
12395 tcg_temp_free_i32(fp0
);
12396 tcg_temp_free_i32(fp1
);
12402 TCGv_i32 fp0
= tcg_temp_new_i32();
12403 TCGv_i32 fp1
= tcg_temp_new_i32();
12405 gen_load_fpr32h(ctx
, fp0
, fs
);
12406 gen_load_fpr32h(ctx
, fp1
, ft
);
12407 gen_store_fpr32(ctx
, fp1
, fd
);
12408 gen_store_fpr32h(ctx
, fp0
, fd
);
12409 tcg_temp_free_i32(fp0
);
12410 tcg_temp_free_i32(fp1
);
12414 case OPC_CMP_UN_PS
:
12415 case OPC_CMP_EQ_PS
:
12416 case OPC_CMP_UEQ_PS
:
12417 case OPC_CMP_OLT_PS
:
12418 case OPC_CMP_ULT_PS
:
12419 case OPC_CMP_OLE_PS
:
12420 case OPC_CMP_ULE_PS
:
12421 case OPC_CMP_SF_PS
:
12422 case OPC_CMP_NGLE_PS
:
12423 case OPC_CMP_SEQ_PS
:
12424 case OPC_CMP_NGL_PS
:
12425 case OPC_CMP_LT_PS
:
12426 case OPC_CMP_NGE_PS
:
12427 case OPC_CMP_LE_PS
:
12428 case OPC_CMP_NGT_PS
:
12429 if (ctx
->opcode
& (1 << 6)) {
12430 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
12432 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
12436 MIPS_INVAL("farith");
12437 generate_exception_end(ctx
, EXCP_RI
);
12442 /* Coprocessor 3 (FPU) */
12443 static void gen_flt3_ldst(DisasContext
*ctx
, uint32_t opc
,
12444 int fd
, int fs
, int base
, int index
)
12446 TCGv t0
= tcg_temp_new();
12449 gen_load_gpr(t0
, index
);
12450 } else if (index
== 0) {
12451 gen_load_gpr(t0
, base
);
12453 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12456 * Don't do NOP if destination is zero: we must perform the actual
12463 TCGv_i32 fp0
= tcg_temp_new_i32();
12465 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12466 tcg_gen_trunc_tl_i32(fp0
, t0
);
12467 gen_store_fpr32(ctx
, fp0
, fd
);
12468 tcg_temp_free_i32(fp0
);
12473 check_cp1_registers(ctx
, fd
);
12475 TCGv_i64 fp0
= tcg_temp_new_i64();
12476 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12477 gen_store_fpr64(ctx
, fp0
, fd
);
12478 tcg_temp_free_i64(fp0
);
12482 check_cp1_64bitmode(ctx
);
12483 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12485 TCGv_i64 fp0
= tcg_temp_new_i64();
12487 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12488 gen_store_fpr64(ctx
, fp0
, fd
);
12489 tcg_temp_free_i64(fp0
);
12495 TCGv_i32 fp0
= tcg_temp_new_i32();
12496 gen_load_fpr32(ctx
, fp0
, fs
);
12497 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12498 tcg_temp_free_i32(fp0
);
12503 check_cp1_registers(ctx
, fs
);
12505 TCGv_i64 fp0
= tcg_temp_new_i64();
12506 gen_load_fpr64(ctx
, fp0
, fs
);
12507 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12508 tcg_temp_free_i64(fp0
);
12512 check_cp1_64bitmode(ctx
);
12513 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12515 TCGv_i64 fp0
= tcg_temp_new_i64();
12516 gen_load_fpr64(ctx
, fp0
, fs
);
12517 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12518 tcg_temp_free_i64(fp0
);
12525 static void gen_flt3_arith(DisasContext
*ctx
, uint32_t opc
,
12526 int fd
, int fr
, int fs
, int ft
)
12532 TCGv t0
= tcg_temp_local_new();
12533 TCGv_i32 fp
= tcg_temp_new_i32();
12534 TCGv_i32 fph
= tcg_temp_new_i32();
12535 TCGLabel
*l1
= gen_new_label();
12536 TCGLabel
*l2
= gen_new_label();
12538 gen_load_gpr(t0
, fr
);
12539 tcg_gen_andi_tl(t0
, t0
, 0x7);
12541 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12542 gen_load_fpr32(ctx
, fp
, fs
);
12543 gen_load_fpr32h(ctx
, fph
, fs
);
12544 gen_store_fpr32(ctx
, fp
, fd
);
12545 gen_store_fpr32h(ctx
, fph
, fd
);
12548 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12550 #ifdef TARGET_WORDS_BIGENDIAN
12551 gen_load_fpr32(ctx
, fp
, fs
);
12552 gen_load_fpr32h(ctx
, fph
, ft
);
12553 gen_store_fpr32h(ctx
, fp
, fd
);
12554 gen_store_fpr32(ctx
, fph
, fd
);
12556 gen_load_fpr32h(ctx
, fph
, fs
);
12557 gen_load_fpr32(ctx
, fp
, ft
);
12558 gen_store_fpr32(ctx
, fph
, fd
);
12559 gen_store_fpr32h(ctx
, fp
, fd
);
12562 tcg_temp_free_i32(fp
);
12563 tcg_temp_free_i32(fph
);
12569 TCGv_i32 fp0
= tcg_temp_new_i32();
12570 TCGv_i32 fp1
= tcg_temp_new_i32();
12571 TCGv_i32 fp2
= tcg_temp_new_i32();
12573 gen_load_fpr32(ctx
, fp0
, fs
);
12574 gen_load_fpr32(ctx
, fp1
, ft
);
12575 gen_load_fpr32(ctx
, fp2
, fr
);
12576 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12577 tcg_temp_free_i32(fp0
);
12578 tcg_temp_free_i32(fp1
);
12579 gen_store_fpr32(ctx
, fp2
, fd
);
12580 tcg_temp_free_i32(fp2
);
12585 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12587 TCGv_i64 fp0
= tcg_temp_new_i64();
12588 TCGv_i64 fp1
= tcg_temp_new_i64();
12589 TCGv_i64 fp2
= tcg_temp_new_i64();
12591 gen_load_fpr64(ctx
, fp0
, fs
);
12592 gen_load_fpr64(ctx
, fp1
, ft
);
12593 gen_load_fpr64(ctx
, fp2
, fr
);
12594 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12595 tcg_temp_free_i64(fp0
);
12596 tcg_temp_free_i64(fp1
);
12597 gen_store_fpr64(ctx
, fp2
, fd
);
12598 tcg_temp_free_i64(fp2
);
12604 TCGv_i64 fp0
= tcg_temp_new_i64();
12605 TCGv_i64 fp1
= tcg_temp_new_i64();
12606 TCGv_i64 fp2
= tcg_temp_new_i64();
12608 gen_load_fpr64(ctx
, fp0
, fs
);
12609 gen_load_fpr64(ctx
, fp1
, ft
);
12610 gen_load_fpr64(ctx
, fp2
, fr
);
12611 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12612 tcg_temp_free_i64(fp0
);
12613 tcg_temp_free_i64(fp1
);
12614 gen_store_fpr64(ctx
, fp2
, fd
);
12615 tcg_temp_free_i64(fp2
);
12621 TCGv_i32 fp0
= tcg_temp_new_i32();
12622 TCGv_i32 fp1
= tcg_temp_new_i32();
12623 TCGv_i32 fp2
= tcg_temp_new_i32();
12625 gen_load_fpr32(ctx
, fp0
, fs
);
12626 gen_load_fpr32(ctx
, fp1
, ft
);
12627 gen_load_fpr32(ctx
, fp2
, fr
);
12628 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12629 tcg_temp_free_i32(fp0
);
12630 tcg_temp_free_i32(fp1
);
12631 gen_store_fpr32(ctx
, fp2
, fd
);
12632 tcg_temp_free_i32(fp2
);
12637 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12639 TCGv_i64 fp0
= tcg_temp_new_i64();
12640 TCGv_i64 fp1
= tcg_temp_new_i64();
12641 TCGv_i64 fp2
= tcg_temp_new_i64();
12643 gen_load_fpr64(ctx
, fp0
, fs
);
12644 gen_load_fpr64(ctx
, fp1
, ft
);
12645 gen_load_fpr64(ctx
, fp2
, fr
);
12646 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12647 tcg_temp_free_i64(fp0
);
12648 tcg_temp_free_i64(fp1
);
12649 gen_store_fpr64(ctx
, fp2
, fd
);
12650 tcg_temp_free_i64(fp2
);
12656 TCGv_i64 fp0
= tcg_temp_new_i64();
12657 TCGv_i64 fp1
= tcg_temp_new_i64();
12658 TCGv_i64 fp2
= tcg_temp_new_i64();
12660 gen_load_fpr64(ctx
, fp0
, fs
);
12661 gen_load_fpr64(ctx
, fp1
, ft
);
12662 gen_load_fpr64(ctx
, fp2
, fr
);
12663 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12664 tcg_temp_free_i64(fp0
);
12665 tcg_temp_free_i64(fp1
);
12666 gen_store_fpr64(ctx
, fp2
, fd
);
12667 tcg_temp_free_i64(fp2
);
12673 TCGv_i32 fp0
= tcg_temp_new_i32();
12674 TCGv_i32 fp1
= tcg_temp_new_i32();
12675 TCGv_i32 fp2
= tcg_temp_new_i32();
12677 gen_load_fpr32(ctx
, fp0
, fs
);
12678 gen_load_fpr32(ctx
, fp1
, ft
);
12679 gen_load_fpr32(ctx
, fp2
, fr
);
12680 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12681 tcg_temp_free_i32(fp0
);
12682 tcg_temp_free_i32(fp1
);
12683 gen_store_fpr32(ctx
, fp2
, fd
);
12684 tcg_temp_free_i32(fp2
);
12689 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12691 TCGv_i64 fp0
= tcg_temp_new_i64();
12692 TCGv_i64 fp1
= tcg_temp_new_i64();
12693 TCGv_i64 fp2
= tcg_temp_new_i64();
12695 gen_load_fpr64(ctx
, fp0
, fs
);
12696 gen_load_fpr64(ctx
, fp1
, ft
);
12697 gen_load_fpr64(ctx
, fp2
, fr
);
12698 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12699 tcg_temp_free_i64(fp0
);
12700 tcg_temp_free_i64(fp1
);
12701 gen_store_fpr64(ctx
, fp2
, fd
);
12702 tcg_temp_free_i64(fp2
);
12708 TCGv_i64 fp0
= tcg_temp_new_i64();
12709 TCGv_i64 fp1
= tcg_temp_new_i64();
12710 TCGv_i64 fp2
= tcg_temp_new_i64();
12712 gen_load_fpr64(ctx
, fp0
, fs
);
12713 gen_load_fpr64(ctx
, fp1
, ft
);
12714 gen_load_fpr64(ctx
, fp2
, fr
);
12715 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12716 tcg_temp_free_i64(fp0
);
12717 tcg_temp_free_i64(fp1
);
12718 gen_store_fpr64(ctx
, fp2
, fd
);
12719 tcg_temp_free_i64(fp2
);
12725 TCGv_i32 fp0
= tcg_temp_new_i32();
12726 TCGv_i32 fp1
= tcg_temp_new_i32();
12727 TCGv_i32 fp2
= tcg_temp_new_i32();
12729 gen_load_fpr32(ctx
, fp0
, fs
);
12730 gen_load_fpr32(ctx
, fp1
, ft
);
12731 gen_load_fpr32(ctx
, fp2
, fr
);
12732 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12733 tcg_temp_free_i32(fp0
);
12734 tcg_temp_free_i32(fp1
);
12735 gen_store_fpr32(ctx
, fp2
, fd
);
12736 tcg_temp_free_i32(fp2
);
12741 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12743 TCGv_i64 fp0
= tcg_temp_new_i64();
12744 TCGv_i64 fp1
= tcg_temp_new_i64();
12745 TCGv_i64 fp2
= tcg_temp_new_i64();
12747 gen_load_fpr64(ctx
, fp0
, fs
);
12748 gen_load_fpr64(ctx
, fp1
, ft
);
12749 gen_load_fpr64(ctx
, fp2
, fr
);
12750 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12751 tcg_temp_free_i64(fp0
);
12752 tcg_temp_free_i64(fp1
);
12753 gen_store_fpr64(ctx
, fp2
, fd
);
12754 tcg_temp_free_i64(fp2
);
12760 TCGv_i64 fp0
= tcg_temp_new_i64();
12761 TCGv_i64 fp1
= tcg_temp_new_i64();
12762 TCGv_i64 fp2
= tcg_temp_new_i64();
12764 gen_load_fpr64(ctx
, fp0
, fs
);
12765 gen_load_fpr64(ctx
, fp1
, ft
);
12766 gen_load_fpr64(ctx
, fp2
, fr
);
12767 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12768 tcg_temp_free_i64(fp0
);
12769 tcg_temp_free_i64(fp1
);
12770 gen_store_fpr64(ctx
, fp2
, fd
);
12771 tcg_temp_free_i64(fp2
);
12775 MIPS_INVAL("flt3_arith");
12776 generate_exception_end(ctx
, EXCP_RI
);
12781 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12785 #if !defined(CONFIG_USER_ONLY)
12787 * The Linux kernel will emulate rdhwr if it's not supported natively.
12788 * Therefore only check the ISA in system mode.
12790 check_insn(ctx
, ISA_MIPS32R2
);
12792 t0
= tcg_temp_new();
12796 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12797 gen_store_gpr(t0
, rt
);
12800 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12801 gen_store_gpr(t0
, rt
);
12804 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12807 gen_helper_rdhwr_cc(t0
, cpu_env
);
12808 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12811 gen_store_gpr(t0
, rt
);
12813 * Break the TB to be able to take timer interrupts immediately
12814 * after reading count. DISAS_STOP isn't sufficient, we need to ensure
12815 * we break completely out of translated code.
12817 gen_save_pc(ctx
->base
.pc_next
+ 4);
12818 ctx
->base
.is_jmp
= DISAS_EXIT
;
12821 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12822 gen_store_gpr(t0
, rt
);
12825 check_insn(ctx
, ISA_MIPS32R6
);
12828 * Performance counter registers are not implemented other than
12829 * control register 0.
12831 generate_exception(ctx
, EXCP_RI
);
12833 gen_helper_rdhwr_performance(t0
, cpu_env
);
12834 gen_store_gpr(t0
, rt
);
12837 check_insn(ctx
, ISA_MIPS32R6
);
12838 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12839 gen_store_gpr(t0
, rt
);
12842 #if defined(CONFIG_USER_ONLY)
12843 tcg_gen_ld_tl(t0
, cpu_env
,
12844 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12845 gen_store_gpr(t0
, rt
);
12848 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12849 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12850 tcg_gen_ld_tl(t0
, cpu_env
,
12851 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12852 gen_store_gpr(t0
, rt
);
12854 generate_exception_end(ctx
, EXCP_RI
);
12858 default: /* Invalid */
12859 MIPS_INVAL("rdhwr");
12860 generate_exception_end(ctx
, EXCP_RI
);
12866 static inline void clear_branch_hflags(DisasContext
*ctx
)
12868 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12869 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12870 save_cpu_state(ctx
, 0);
12873 * It is not safe to save ctx->hflags as hflags may be changed
12874 * in execution time by the instruction in delay / forbidden slot.
12876 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12880 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12882 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12883 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12884 /* Branches completion */
12885 clear_branch_hflags(ctx
);
12886 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12887 /* FIXME: Need to clear can_do_io. */
12888 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12889 case MIPS_HFLAG_FBNSLOT
:
12890 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12893 /* unconditional branch */
12894 if (proc_hflags
& MIPS_HFLAG_BX
) {
12895 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12897 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12899 case MIPS_HFLAG_BL
:
12900 /* blikely taken case */
12901 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12903 case MIPS_HFLAG_BC
:
12904 /* Conditional branch */
12906 TCGLabel
*l1
= gen_new_label();
12908 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12909 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12911 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12914 case MIPS_HFLAG_BR
:
12915 /* unconditional branch to register */
12916 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12917 TCGv t0
= tcg_temp_new();
12918 TCGv_i32 t1
= tcg_temp_new_i32();
12920 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12921 tcg_gen_trunc_tl_i32(t1
, t0
);
12923 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12924 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12925 tcg_gen_or_i32(hflags
, hflags
, t1
);
12926 tcg_temp_free_i32(t1
);
12928 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12930 tcg_gen_mov_tl(cpu_PC
, btarget
);
12932 if (ctx
->base
.singlestep_enabled
) {
12933 save_cpu_state(ctx
, 0);
12934 gen_helper_raise_exception_debug(cpu_env
);
12936 tcg_gen_lookup_and_goto_ptr();
12939 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12945 /* Compact Branches */
12946 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12947 int rs
, int rt
, int32_t offset
)
12949 int bcond_compute
= 0;
12950 TCGv t0
= tcg_temp_new();
12951 TCGv t1
= tcg_temp_new();
12952 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12954 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12955 #ifdef MIPS_DEBUG_DISAS
12956 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12957 "\n", ctx
->base
.pc_next
);
12959 generate_exception_end(ctx
, EXCP_RI
);
12963 /* Load needed operands and calculate btarget */
12965 /* compact branch */
12966 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12967 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
12968 gen_load_gpr(t0
, rs
);
12969 gen_load_gpr(t1
, rt
);
12971 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12972 if (rs
<= rt
&& rs
== 0) {
12973 /* OPC_BEQZALC, OPC_BNEZALC */
12974 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12977 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
12978 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
12979 gen_load_gpr(t0
, rs
);
12980 gen_load_gpr(t1
, rt
);
12982 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12984 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
12985 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
12986 if (rs
== 0 || rs
== rt
) {
12987 /* OPC_BLEZALC, OPC_BGEZALC */
12988 /* OPC_BGTZALC, OPC_BLTZALC */
12989 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
12991 gen_load_gpr(t0
, rs
);
12992 gen_load_gpr(t1
, rt
);
12994 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
12998 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13003 /* OPC_BEQZC, OPC_BNEZC */
13004 gen_load_gpr(t0
, rs
);
13006 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13008 /* OPC_JIC, OPC_JIALC */
13009 TCGv tbase
= tcg_temp_new();
13010 TCGv toffset
= tcg_temp_new();
13012 gen_load_gpr(tbase
, rt
);
13013 tcg_gen_movi_tl(toffset
, offset
);
13014 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
13015 tcg_temp_free(tbase
);
13016 tcg_temp_free(toffset
);
13020 MIPS_INVAL("Compact branch/jump");
13021 generate_exception_end(ctx
, EXCP_RI
);
13025 if (bcond_compute
== 0) {
13026 /* Uncoditional compact branch */
13029 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13032 ctx
->hflags
|= MIPS_HFLAG_BR
;
13035 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13038 ctx
->hflags
|= MIPS_HFLAG_B
;
13041 MIPS_INVAL("Compact branch/jump");
13042 generate_exception_end(ctx
, EXCP_RI
);
13046 /* Generating branch here as compact branches don't have delay slot */
13047 gen_branch(ctx
, 4);
13049 /* Conditional compact branch */
13050 TCGLabel
*fs
= gen_new_label();
13051 save_cpu_state(ctx
, 0);
13054 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
13055 if (rs
== 0 && rt
!= 0) {
13057 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13058 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13060 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13063 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
13066 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
13067 if (rs
== 0 && rt
!= 0) {
13069 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13070 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13072 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13075 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
13078 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
13079 if (rs
== 0 && rt
!= 0) {
13081 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13082 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13084 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13087 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
13090 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
13091 if (rs
== 0 && rt
!= 0) {
13093 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13094 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13096 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13099 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
13102 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
13103 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
13105 /* OPC_BOVC, OPC_BNVC */
13106 TCGv t2
= tcg_temp_new();
13107 TCGv t3
= tcg_temp_new();
13108 TCGv t4
= tcg_temp_new();
13109 TCGv input_overflow
= tcg_temp_new();
13111 gen_load_gpr(t0
, rs
);
13112 gen_load_gpr(t1
, rt
);
13113 tcg_gen_ext32s_tl(t2
, t0
);
13114 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
13115 tcg_gen_ext32s_tl(t3
, t1
);
13116 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
13117 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
13119 tcg_gen_add_tl(t4
, t2
, t3
);
13120 tcg_gen_ext32s_tl(t4
, t4
);
13121 tcg_gen_xor_tl(t2
, t2
, t3
);
13122 tcg_gen_xor_tl(t3
, t4
, t3
);
13123 tcg_gen_andc_tl(t2
, t3
, t2
);
13124 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
13125 tcg_gen_or_tl(t4
, t4
, input_overflow
);
13126 if (opc
== OPC_BOVC
) {
13128 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
13131 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
13133 tcg_temp_free(input_overflow
);
13137 } else if (rs
< rt
&& rs
== 0) {
13138 /* OPC_BEQZALC, OPC_BNEZALC */
13139 if (opc
== OPC_BEQZALC
) {
13141 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
13144 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
13147 /* OPC_BEQC, OPC_BNEC */
13148 if (opc
== OPC_BEQC
) {
13150 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
13153 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
13158 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
13161 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
13164 MIPS_INVAL("Compact conditional branch/jump");
13165 generate_exception_end(ctx
, EXCP_RI
);
13169 /* Generating branch here as compact branches don't have delay slot */
13170 gen_goto_tb(ctx
, 1, ctx
->btarget
);
13173 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
13181 /* ISA extensions (ASEs) */
13182 /* MIPS16 extension to MIPS32 */
13184 /* MIPS16 major opcodes */
13186 M16_OPC_ADDIUSP
= 0x00,
13187 M16_OPC_ADDIUPC
= 0x01,
13189 M16_OPC_JAL
= 0x03,
13190 M16_OPC_BEQZ
= 0x04,
13191 M16_OPC_BNEQZ
= 0x05,
13192 M16_OPC_SHIFT
= 0x06,
13194 M16_OPC_RRIA
= 0x08,
13195 M16_OPC_ADDIU8
= 0x09,
13196 M16_OPC_SLTI
= 0x0a,
13197 M16_OPC_SLTIU
= 0x0b,
13200 M16_OPC_CMPI
= 0x0e,
13204 M16_OPC_LWSP
= 0x12,
13206 M16_OPC_LBU
= 0x14,
13207 M16_OPC_LHU
= 0x15,
13208 M16_OPC_LWPC
= 0x16,
13209 M16_OPC_LWU
= 0x17,
13212 M16_OPC_SWSP
= 0x1a,
13214 M16_OPC_RRR
= 0x1c,
13216 M16_OPC_EXTEND
= 0x1e,
13220 /* I8 funct field */
13239 /* RR funct field */
13273 /* I64 funct field */
13281 I64_DADDIUPC
= 0x6,
13285 /* RR ry field for CNVT */
13287 RR_RY_CNVT_ZEB
= 0x0,
13288 RR_RY_CNVT_ZEH
= 0x1,
13289 RR_RY_CNVT_ZEW
= 0x2,
13290 RR_RY_CNVT_SEB
= 0x4,
13291 RR_RY_CNVT_SEH
= 0x5,
13292 RR_RY_CNVT_SEW
= 0x6,
13295 static int xlat(int r
)
13297 static int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
13302 static void gen_mips16_save(DisasContext
*ctx
,
13303 int xsregs
, int aregs
,
13304 int do_ra
, int do_s0
, int do_s1
,
13307 TCGv t0
= tcg_temp_new();
13308 TCGv t1
= tcg_temp_new();
13309 TCGv t2
= tcg_temp_new();
13339 generate_exception_end(ctx
, EXCP_RI
);
13345 gen_base_offset_addr(ctx
, t0
, 29, 12);
13346 gen_load_gpr(t1
, 7);
13347 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13350 gen_base_offset_addr(ctx
, t0
, 29, 8);
13351 gen_load_gpr(t1
, 6);
13352 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13355 gen_base_offset_addr(ctx
, t0
, 29, 4);
13356 gen_load_gpr(t1
, 5);
13357 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13360 gen_base_offset_addr(ctx
, t0
, 29, 0);
13361 gen_load_gpr(t1
, 4);
13362 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13365 gen_load_gpr(t0
, 29);
13367 #define DECR_AND_STORE(reg) do { \
13368 tcg_gen_movi_tl(t2, -4); \
13369 gen_op_addr_add(ctx, t0, t0, t2); \
13370 gen_load_gpr(t1, reg); \
13371 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13375 DECR_AND_STORE(31);
13380 DECR_AND_STORE(30);
13383 DECR_AND_STORE(23);
13386 DECR_AND_STORE(22);
13389 DECR_AND_STORE(21);
13392 DECR_AND_STORE(20);
13395 DECR_AND_STORE(19);
13398 DECR_AND_STORE(18);
13402 DECR_AND_STORE(17);
13405 DECR_AND_STORE(16);
13435 generate_exception_end(ctx
, EXCP_RI
);
13451 #undef DECR_AND_STORE
13453 tcg_gen_movi_tl(t2
, -framesize
);
13454 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13460 static void gen_mips16_restore(DisasContext
*ctx
,
13461 int xsregs
, int aregs
,
13462 int do_ra
, int do_s0
, int do_s1
,
13466 TCGv t0
= tcg_temp_new();
13467 TCGv t1
= tcg_temp_new();
13468 TCGv t2
= tcg_temp_new();
13470 tcg_gen_movi_tl(t2
, framesize
);
13471 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13473 #define DECR_AND_LOAD(reg) do { \
13474 tcg_gen_movi_tl(t2, -4); \
13475 gen_op_addr_add(ctx, t0, t0, t2); \
13476 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13477 gen_store_gpr(t1, reg); \
13541 generate_exception_end(ctx
, EXCP_RI
);
13557 #undef DECR_AND_LOAD
13559 tcg_gen_movi_tl(t2
, framesize
);
13560 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13566 static void gen_addiupc(DisasContext
*ctx
, int rx
, int imm
,
13567 int is_64_bit
, int extended
)
13571 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13572 generate_exception_end(ctx
, EXCP_RI
);
13576 t0
= tcg_temp_new();
13578 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13579 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13581 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13587 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13590 TCGv_i32 t0
= tcg_const_i32(op
);
13591 TCGv t1
= tcg_temp_new();
13592 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13593 gen_helper_cache(cpu_env
, t1
, t0
);
13596 #if defined(TARGET_MIPS64)
13597 static void decode_i64_mips16(DisasContext
*ctx
,
13598 int ry
, int funct
, int16_t offset
,
13603 check_insn(ctx
, ISA_MIPS3
);
13604 check_mips_64(ctx
);
13605 offset
= extended
? offset
: offset
<< 3;
13606 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13609 check_insn(ctx
, ISA_MIPS3
);
13610 check_mips_64(ctx
);
13611 offset
= extended
? offset
: offset
<< 3;
13612 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13615 check_insn(ctx
, ISA_MIPS3
);
13616 check_mips_64(ctx
);
13617 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13618 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13621 check_insn(ctx
, ISA_MIPS3
);
13622 check_mips_64(ctx
);
13623 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13624 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13627 check_insn(ctx
, ISA_MIPS3
);
13628 check_mips_64(ctx
);
13629 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13630 generate_exception_end(ctx
, EXCP_RI
);
13632 offset
= extended
? offset
: offset
<< 3;
13633 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13637 check_insn(ctx
, ISA_MIPS3
);
13638 check_mips_64(ctx
);
13639 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13640 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13643 check_insn(ctx
, ISA_MIPS3
);
13644 check_mips_64(ctx
);
13645 offset
= extended
? offset
: offset
<< 2;
13646 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13649 check_insn(ctx
, ISA_MIPS3
);
13650 check_mips_64(ctx
);
13651 offset
= extended
? offset
: offset
<< 2;
13652 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13658 static int decode_extended_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13660 int extend
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13661 int op
, rx
, ry
, funct
, sa
;
13662 int16_t imm
, offset
;
13664 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13665 op
= (ctx
->opcode
>> 11) & 0x1f;
13666 sa
= (ctx
->opcode
>> 22) & 0x1f;
13667 funct
= (ctx
->opcode
>> 8) & 0x7;
13668 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13669 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13670 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13671 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13672 | (ctx
->opcode
& 0x1f));
13675 * The extended opcodes cleverly reuse the opcodes from their 16-bit
13679 case M16_OPC_ADDIUSP
:
13680 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13682 case M16_OPC_ADDIUPC
:
13683 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13686 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13687 /* No delay slot, so just process as a normal instruction */
13690 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13691 /* No delay slot, so just process as a normal instruction */
13693 case M16_OPC_BNEQZ
:
13694 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13695 /* No delay slot, so just process as a normal instruction */
13697 case M16_OPC_SHIFT
:
13698 switch (ctx
->opcode
& 0x3) {
13700 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13703 #if defined(TARGET_MIPS64)
13704 check_mips_64(ctx
);
13705 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13707 generate_exception_end(ctx
, EXCP_RI
);
13711 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13714 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13718 #if defined(TARGET_MIPS64)
13720 check_insn(ctx
, ISA_MIPS3
);
13721 check_mips_64(ctx
);
13722 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13726 imm
= ctx
->opcode
& 0xf;
13727 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13728 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13729 imm
= (int16_t) (imm
<< 1) >> 1;
13730 if ((ctx
->opcode
>> 4) & 0x1) {
13731 #if defined(TARGET_MIPS64)
13732 check_mips_64(ctx
);
13733 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13735 generate_exception_end(ctx
, EXCP_RI
);
13738 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13741 case M16_OPC_ADDIU8
:
13742 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13745 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13747 case M16_OPC_SLTIU
:
13748 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13753 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13756 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13759 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13762 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13765 check_insn(ctx
, ISA_MIPS32
);
13767 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13768 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13769 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13770 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13771 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13772 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13773 | (ctx
->opcode
& 0xf)) << 3;
13775 if (ctx
->opcode
& (1 << 7)) {
13776 gen_mips16_save(ctx
, xsregs
, aregs
,
13777 do_ra
, do_s0
, do_s1
,
13780 gen_mips16_restore(ctx
, xsregs
, aregs
,
13781 do_ra
, do_s0
, do_s1
,
13787 generate_exception_end(ctx
, EXCP_RI
);
13792 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13795 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13797 #if defined(TARGET_MIPS64)
13799 check_insn(ctx
, ISA_MIPS3
);
13800 check_mips_64(ctx
);
13801 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13805 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13808 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13811 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13814 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13817 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13820 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13823 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13825 #if defined(TARGET_MIPS64)
13827 check_insn(ctx
, ISA_MIPS3
);
13828 check_mips_64(ctx
);
13829 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13833 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13836 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13839 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13842 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13844 #if defined(TARGET_MIPS64)
13846 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13850 generate_exception_end(ctx
, EXCP_RI
);
/*
 * True when an SDBBP code designates a UHI (semihosting) call and
 * semihosting is enabled; constant false for user-mode emulation.
 */
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
#ifdef CONFIG_USER_ONLY
/*
 * User-mode stub: is_uhi() is constant false here, so the compiler
 * should dead-code away every call site; reaching this is a bug.
 */
static inline void gen_helper_do_semihosting(void *env)
{
    g_assert_not_reached();
}
#endif
13874 static int decode_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13878 int op
, cnvt_op
, op1
, offset
;
13882 op
= (ctx
->opcode
>> 11) & 0x1f;
13883 sa
= (ctx
->opcode
>> 2) & 0x7;
13884 sa
= sa
== 0 ? 8 : sa
;
13885 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13886 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13887 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13888 op1
= offset
= ctx
->opcode
& 0x1f;
13893 case M16_OPC_ADDIUSP
:
13895 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13897 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13900 case M16_OPC_ADDIUPC
:
13901 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13904 offset
= (ctx
->opcode
& 0x7ff) << 1;
13905 offset
= (int16_t)(offset
<< 4) >> 4;
13906 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13907 /* No delay slot, so just process as a normal instruction */
13910 offset
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
13911 offset
= (((ctx
->opcode
& 0x1f) << 21)
13912 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13914 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13915 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13919 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13920 ((int8_t)ctx
->opcode
) << 1, 0);
13921 /* No delay slot, so just process as a normal instruction */
13923 case M16_OPC_BNEQZ
:
13924 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13925 ((int8_t)ctx
->opcode
) << 1, 0);
13926 /* No delay slot, so just process as a normal instruction */
13928 case M16_OPC_SHIFT
:
13929 switch (ctx
->opcode
& 0x3) {
13931 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13934 #if defined(TARGET_MIPS64)
13935 check_insn(ctx
, ISA_MIPS3
);
13936 check_mips_64(ctx
);
13937 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13939 generate_exception_end(ctx
, EXCP_RI
);
13943 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13946 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13950 #if defined(TARGET_MIPS64)
13952 check_insn(ctx
, ISA_MIPS3
);
13953 check_mips_64(ctx
);
13954 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13959 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13961 if ((ctx
->opcode
>> 4) & 1) {
13962 #if defined(TARGET_MIPS64)
13963 check_insn(ctx
, ISA_MIPS3
);
13964 check_mips_64(ctx
);
13965 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13967 generate_exception_end(ctx
, EXCP_RI
);
13970 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13974 case M16_OPC_ADDIU8
:
13976 int16_t imm
= (int8_t) ctx
->opcode
;
13978 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13983 int16_t imm
= (uint8_t) ctx
->opcode
;
13984 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13987 case M16_OPC_SLTIU
:
13989 int16_t imm
= (uint8_t) ctx
->opcode
;
13990 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13997 funct
= (ctx
->opcode
>> 8) & 0x7;
14000 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
14001 ((int8_t)ctx
->opcode
) << 1, 0);
14004 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
14005 ((int8_t)ctx
->opcode
) << 1, 0);
14008 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
14011 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
14012 ((int8_t)ctx
->opcode
) << 3);
14015 check_insn(ctx
, ISA_MIPS32
);
14017 int do_ra
= ctx
->opcode
& (1 << 6);
14018 int do_s0
= ctx
->opcode
& (1 << 5);
14019 int do_s1
= ctx
->opcode
& (1 << 4);
14020 int framesize
= ctx
->opcode
& 0xf;
14022 if (framesize
== 0) {
14025 framesize
= framesize
<< 3;
14028 if (ctx
->opcode
& (1 << 7)) {
14029 gen_mips16_save(ctx
, 0, 0,
14030 do_ra
, do_s0
, do_s1
, framesize
);
14032 gen_mips16_restore(ctx
, 0, 0,
14033 do_ra
, do_s0
, do_s1
, framesize
);
14039 int rz
= xlat(ctx
->opcode
& 0x7);
14041 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
14042 ((ctx
->opcode
>> 5) & 0x7);
14043 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
14047 reg32
= ctx
->opcode
& 0x1f;
14048 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
14051 generate_exception_end(ctx
, EXCP_RI
);
14058 int16_t imm
= (uint8_t) ctx
->opcode
;
14060 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
14065 int16_t imm
= (uint8_t) ctx
->opcode
;
14066 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
14069 #if defined(TARGET_MIPS64)
14071 check_insn(ctx
, ISA_MIPS3
);
14072 check_mips_64(ctx
);
14073 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
14077 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
14080 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
14083 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14086 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
14089 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
14092 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
14095 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
14097 #if defined(TARGET_MIPS64)
14099 check_insn(ctx
, ISA_MIPS3
);
14100 check_mips_64(ctx
);
14101 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
14105 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
14108 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
14111 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14114 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
14118 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
14121 switch (ctx
->opcode
& 0x3) {
14123 mips32_op
= OPC_ADDU
;
14126 mips32_op
= OPC_SUBU
;
14128 #if defined(TARGET_MIPS64)
14130 mips32_op
= OPC_DADDU
;
14131 check_insn(ctx
, ISA_MIPS3
);
14132 check_mips_64(ctx
);
14135 mips32_op
= OPC_DSUBU
;
14136 check_insn(ctx
, ISA_MIPS3
);
14137 check_mips_64(ctx
);
14141 generate_exception_end(ctx
, EXCP_RI
);
14145 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
14154 int nd
= (ctx
->opcode
>> 7) & 0x1;
14155 int link
= (ctx
->opcode
>> 6) & 0x1;
14156 int ra
= (ctx
->opcode
>> 5) & 0x1;
14159 check_insn(ctx
, ISA_MIPS32
);
14168 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
14173 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
14174 gen_helper_do_semihosting(cpu_env
);
14177 * XXX: not clear which exception should be raised
14178 * when in debug mode...
14180 check_insn(ctx
, ISA_MIPS32
);
14181 generate_exception_end(ctx
, EXCP_DBp
);
14185 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
14188 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
14191 generate_exception_end(ctx
, EXCP_BREAK
);
14194 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
14197 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
14200 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
14202 #if defined(TARGET_MIPS64)
14204 check_insn(ctx
, ISA_MIPS3
);
14205 check_mips_64(ctx
);
14206 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
14210 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
14213 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
14216 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
14219 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
14222 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
14225 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
14228 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
14231 check_insn(ctx
, ISA_MIPS32
);
14233 case RR_RY_CNVT_ZEB
:
14234 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14236 case RR_RY_CNVT_ZEH
:
14237 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14239 case RR_RY_CNVT_SEB
:
14240 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14242 case RR_RY_CNVT_SEH
:
14243 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14245 #if defined (TARGET_MIPS64)
14246 case RR_RY_CNVT_ZEW
:
14247 check_insn(ctx
, ISA_MIPS64
);
14248 check_mips_64(ctx
);
14249 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14251 case RR_RY_CNVT_SEW
:
14252 check_insn(ctx
, ISA_MIPS64
);
14253 check_mips_64(ctx
);
14254 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14258 generate_exception_end(ctx
, EXCP_RI
);
14263 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14265 #if defined(TARGET_MIPS64)
14267 check_insn(ctx
, ISA_MIPS3
);
14268 check_mips_64(ctx
);
14269 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14272 check_insn(ctx
, ISA_MIPS3
);
14273 check_mips_64(ctx
);
14274 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14277 check_insn(ctx
, ISA_MIPS3
);
14278 check_mips_64(ctx
);
14279 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14282 check_insn(ctx
, ISA_MIPS3
);
14283 check_mips_64(ctx
);
14284 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14288 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14291 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14294 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14297 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14299 #if defined(TARGET_MIPS64)
14301 check_insn(ctx
, ISA_MIPS3
);
14302 check_mips_64(ctx
);
14303 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14306 check_insn(ctx
, ISA_MIPS3
);
14307 check_mips_64(ctx
);
14308 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14311 check_insn(ctx
, ISA_MIPS3
);
14312 check_mips_64(ctx
);
14313 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14316 check_insn(ctx
, ISA_MIPS3
);
14317 check_mips_64(ctx
);
14318 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14322 generate_exception_end(ctx
, EXCP_RI
);
14326 case M16_OPC_EXTEND
:
14327 decode_extended_mips16_opc(env
, ctx
);
14330 #if defined(TARGET_MIPS64)
14332 funct
= (ctx
->opcode
>> 8) & 0x7;
14333 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14337 generate_exception_end(ctx
, EXCP_RI
);
14344 /* microMIPS extension to MIPS32/MIPS64 */
14347 * microMIPS32/microMIPS64 major opcodes
14349 * 1. MIPS Architecture for Programmers Volume II-B:
14350 * The microMIPS32 Instruction Set (Revision 3.05)
14352 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14354 * 2. MIPS Architecture For Programmers Volume II-A:
14355 * The MIPS64 Instruction Set (Revision 3.51)
14385 POOL32S
= 0x16, /* MIPS64 */
14386 DADDIU32
= 0x17, /* MIPS64 */
14415 /* 0x29 is reserved */
14428 /* 0x31 is reserved */
14441 SD32
= 0x36, /* MIPS64 */
14442 LD32
= 0x37, /* MIPS64 */
14444 /* 0x39 is reserved */
14460 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14482 /* POOL32A encoding of minor opcode field */
14486 * These opcodes are distinguished only by bits 9..6; those bits are
14487 * what are recorded below.
14525 /* The following can be distinguished by their lower 6 bits. */
14535 /* POOL32AXF encoding of minor opcode field extension */
14538 * 1. MIPS Architecture for Programmers Volume II-B:
14539 * The microMIPS32 Instruction Set (Revision 3.05)
14541 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14543 * 2. MIPS Architecture for Programmers VolumeIV-e:
14544 * The MIPS DSP Application-Specific Extension
14545 * to the microMIPS32 Architecture (Revision 2.34)
14547 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14562 /* begin of microMIPS32 DSP */
14564 /* bits 13..12 for 0x01 */
14570 /* bits 13..12 for 0x2a */
14576 /* bits 13..12 for 0x32 */
14580 /* end of microMIPS32 DSP */
14582 /* bits 15..12 for 0x2c */
14599 /* bits 15..12 for 0x34 */
14607 /* bits 15..12 for 0x3c */
14609 JR
= 0x0, /* alias */
14617 /* bits 15..12 for 0x05 */
14621 /* bits 15..12 for 0x0d */
14633 /* bits 15..12 for 0x15 */
14639 /* bits 15..12 for 0x1d */
14643 /* bits 15..12 for 0x2d */
14648 /* bits 15..12 for 0x35 */
14655 /* POOL32B encoding of minor opcode field (bits 15..12) */
14671 /* POOL32C encoding of minor opcode field (bits 15..12) */
14692 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14705 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14718 /* POOL32F encoding of minor opcode field (bits 5..0) */
14721 /* These are the bit 7..6 values */
14730 /* These are the bit 8..6 values */
14755 MOVZ_FMT_05
= 0x05,
14789 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14796 /* POOL32Fxf encoding of minor opcode extension field */
14834 /* POOL32I encoding of minor opcode field (bits 25..21) */
14864 /* These overlap and are distinguished by bit16 of the instruction */
14873 /* POOL16A encoding of minor opcode field */
14880 /* POOL16B encoding of minor opcode field */
14887 /* POOL16C encoding of minor opcode field */
14907 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14931 /* POOL16D encoding of minor opcode field */
14938 /* POOL16E encoding of minor opcode field */
/*
 * Translate a 3-bit microMIPS register encoding to the architectural
 * GPR number: encodings 0 and 1 select $16/$17, 2..7 select $2..$7.
 */
static int mmreg(int r)
{
    return r < 2 ? r + 16 : r;
}
/*
 * Register mapping for 16-bit store instructions: same as mmreg()
 * except encoding 0 selects $0 (the zero register) instead of $16.
 */
static int mmreg2(int r)
{
    return r == 1 ? 17 : r;
}
14960 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14961 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14962 #define uMIPS_RS2(op) uMIPS_RS(op)
14963 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14964 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14965 #define uMIPS_RS5(op) (op & 0x1f)
14967 /* Signed immediate */
14968 #define SIMM(op, start, width) \
14969 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
14972 /* Zero-extended immediate */
14973 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
14975 static void gen_addiur1sp(DisasContext
*ctx
)
14977 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14979 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
14982 static void gen_addiur2(DisasContext
*ctx
)
14984 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
14985 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14986 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14988 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
14991 static void gen_addiusp(DisasContext
*ctx
)
14993 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
14996 if (encoded
<= 1) {
14997 decoded
= 256 + encoded
;
14998 } else if (encoded
<= 255) {
15000 } else if (encoded
<= 509) {
15001 decoded
= encoded
- 512;
15003 decoded
= encoded
- 768;
15006 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
15009 static void gen_addius5(DisasContext
*ctx
)
15011 int imm
= SIMM(ctx
->opcode
, 1, 4);
15012 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15014 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
15017 static void gen_andi16(DisasContext
*ctx
)
15019 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
15020 31, 32, 63, 64, 255, 32768, 65535 };
15021 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15022 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15023 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
15025 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
15028 static void gen_ldst_multiple(DisasContext
*ctx
, uint32_t opc
, int reglist
,
15029 int base
, int16_t offset
)
15034 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
15035 generate_exception_end(ctx
, EXCP_RI
);
15039 t0
= tcg_temp_new();
15041 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15043 t1
= tcg_const_tl(reglist
);
15044 t2
= tcg_const_i32(ctx
->mem_idx
);
15046 save_cpu_state(ctx
, 1);
15049 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
15052 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
15054 #ifdef TARGET_MIPS64
15056 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
15059 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
15065 tcg_temp_free_i32(t2
);
15069 static void gen_pool16c_insn(DisasContext
*ctx
)
15071 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
15072 int rs
= mmreg(ctx
->opcode
& 0x7);
15074 switch (((ctx
->opcode
) >> 4) & 0x3f) {
15079 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
15085 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
15091 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
15097 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
15104 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15105 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15107 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
15116 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15117 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15119 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
15126 int reg
= ctx
->opcode
& 0x1f;
15128 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
15134 int reg
= ctx
->opcode
& 0x1f;
15135 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
15137 * Let normal delay slot handling in our caller take us
15138 * to the branch target.
15144 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
15145 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15149 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
15150 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15154 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
15158 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
15161 generate_exception_end(ctx
, EXCP_BREAK
);
15164 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
15165 gen_helper_do_semihosting(cpu_env
);
15168 * XXX: not clear which exception should be raised
15169 * when in debug mode...
15171 check_insn(ctx
, ISA_MIPS32
);
15172 generate_exception_end(ctx
, EXCP_DBp
);
15175 case JRADDIUSP
+ 0:
15176 case JRADDIUSP
+ 1:
15178 int imm
= ZIMM(ctx
->opcode
, 0, 5);
15179 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15180 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15182 * Let normal delay slot handling in our caller take us
15183 * to the branch target.
15188 generate_exception_end(ctx
, EXCP_RI
);
15193 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
15196 int rd
, rs
, re
, rt
;
15197 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
15198 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
15199 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
15200 rd
= rd_enc
[enc_dest
];
15201 re
= re_enc
[enc_dest
];
15202 rs
= rs_rt_enc
[enc_rs
];
15203 rt
= rs_rt_enc
[enc_rt
];
15205 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
15207 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
15210 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
15212 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
15216 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
15218 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
15219 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
15221 switch (ctx
->opcode
& 0xf) {
15223 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
15226 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
15230 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15231 int offset
= extract32(ctx
->opcode
, 4, 4);
15232 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
15235 case R6_JRC16
: /* JRCADDIUSP */
15236 if ((ctx
->opcode
>> 4) & 1) {
15238 int imm
= extract32(ctx
->opcode
, 5, 5);
15239 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15240 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15243 rs
= extract32(ctx
->opcode
, 5, 5);
15244 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15256 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15257 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15258 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15259 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15263 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15266 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15270 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15271 int offset
= extract32(ctx
->opcode
, 4, 4);
15272 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15275 case JALRC16
: /* BREAK16, SDBBP16 */
15276 switch (ctx
->opcode
& 0x3f) {
15278 case JALRC16
+ 0x20:
15280 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15285 generate_exception(ctx
, EXCP_BREAK
);
15289 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15290 gen_helper_do_semihosting(cpu_env
);
15292 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15293 generate_exception(ctx
, EXCP_RI
);
15295 generate_exception(ctx
, EXCP_DBp
);
15302 generate_exception(ctx
, EXCP_RI
);
15307 static void gen_ldxs(DisasContext
*ctx
, int base
, int index
, int rd
)
15309 TCGv t0
= tcg_temp_new();
15310 TCGv t1
= tcg_temp_new();
15312 gen_load_gpr(t0
, base
);
15315 gen_load_gpr(t1
, index
);
15316 tcg_gen_shli_tl(t1
, t1
, 2);
15317 gen_op_addr_add(ctx
, t0
, t1
, t0
);
15320 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15321 gen_store_gpr(t1
, rd
);
15327 static void gen_ldst_pair(DisasContext
*ctx
, uint32_t opc
, int rd
,
15328 int base
, int16_t offset
)
15332 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
15333 generate_exception_end(ctx
, EXCP_RI
);
15337 t0
= tcg_temp_new();
15338 t1
= tcg_temp_new();
15340 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15345 generate_exception_end(ctx
, EXCP_RI
);
15348 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15349 gen_store_gpr(t1
, rd
);
15350 tcg_gen_movi_tl(t1
, 4);
15351 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15352 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15353 gen_store_gpr(t1
, rd
+ 1);
15356 gen_load_gpr(t1
, rd
);
15357 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15358 tcg_gen_movi_tl(t1
, 4);
15359 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15360 gen_load_gpr(t1
, rd
+ 1);
15361 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15363 #ifdef TARGET_MIPS64
15366 generate_exception_end(ctx
, EXCP_RI
);
15369 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15370 gen_store_gpr(t1
, rd
);
15371 tcg_gen_movi_tl(t1
, 8);
15372 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15373 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15374 gen_store_gpr(t1
, rd
+ 1);
15377 gen_load_gpr(t1
, rd
);
15378 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15379 tcg_gen_movi_tl(t1
, 8);
15380 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15381 gen_load_gpr(t1
, rd
+ 1);
15382 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15390 static void gen_sync(int stype
)
15392 TCGBar tcg_mo
= TCG_BAR_SC
;
15395 case 0x4: /* SYNC_WMB */
15396 tcg_mo
|= TCG_MO_ST_ST
;
15398 case 0x10: /* SYNC_MB */
15399 tcg_mo
|= TCG_MO_ALL
;
15401 case 0x11: /* SYNC_ACQUIRE */
15402 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
15404 case 0x12: /* SYNC_RELEASE */
15405 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
15407 case 0x13: /* SYNC_RMB */
15408 tcg_mo
|= TCG_MO_LD_LD
;
15411 tcg_mo
|= TCG_MO_ALL
;
15415 tcg_gen_mb(tcg_mo
);
15418 static void gen_pool32axf(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
15420 int extension
= (ctx
->opcode
>> 6) & 0x3f;
15421 int minor
= (ctx
->opcode
>> 12) & 0xf;
15422 uint32_t mips32_op
;
15424 switch (extension
) {
15426 mips32_op
= OPC_TEQ
;
15429 mips32_op
= OPC_TGE
;
15432 mips32_op
= OPC_TGEU
;
15435 mips32_op
= OPC_TLT
;
15438 mips32_op
= OPC_TLTU
;
15441 mips32_op
= OPC_TNE
;
15443 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15445 #ifndef CONFIG_USER_ONLY
15448 check_cp0_enabled(ctx
);
15450 /* Treat as NOP. */
15453 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15457 check_cp0_enabled(ctx
);
15459 TCGv t0
= tcg_temp_new();
15461 gen_load_gpr(t0
, rt
);
15462 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15468 switch (minor
& 3) {
15470 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15473 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15476 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15479 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15482 goto pool32axf_invalid
;
15486 switch (minor
& 3) {
15488 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15491 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15494 goto pool32axf_invalid
;
15500 check_insn(ctx
, ISA_MIPS32R6
);
15501 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15504 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15507 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15510 mips32_op
= OPC_CLO
;
15513 mips32_op
= OPC_CLZ
;
15515 check_insn(ctx
, ISA_MIPS32
);
15516 gen_cl(ctx
, mips32_op
, rt
, rs
);
15519 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15520 gen_rdhwr(ctx
, rt
, rs
, 0);
15523 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15526 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15527 mips32_op
= OPC_MULT
;
15530 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15531 mips32_op
= OPC_MULTU
;
15534 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15535 mips32_op
= OPC_DIV
;
15538 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15539 mips32_op
= OPC_DIVU
;
15542 check_insn(ctx
, ISA_MIPS32
);
15543 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15546 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15547 mips32_op
= OPC_MADD
;
15550 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15551 mips32_op
= OPC_MADDU
;
15554 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15555 mips32_op
= OPC_MSUB
;
15558 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15559 mips32_op
= OPC_MSUBU
;
15561 check_insn(ctx
, ISA_MIPS32
);
15562 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15565 goto pool32axf_invalid
;
15576 generate_exception_err(ctx
, EXCP_CpU
, 2);
15579 goto pool32axf_invalid
;
15584 case JALR
: /* JALRC */
15585 case JALR_HB
: /* JALRC_HB */
15586 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15587 /* JALRC, JALRC_HB */
15588 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15590 /* JALR, JALR_HB */
15591 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15592 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15597 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15598 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15599 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15602 goto pool32axf_invalid
;
15608 check_cp0_enabled(ctx
);
15609 check_insn(ctx
, ISA_MIPS32R2
);
15610 gen_load_srsgpr(rs
, rt
);
15613 check_cp0_enabled(ctx
);
15614 check_insn(ctx
, ISA_MIPS32R2
);
15615 gen_store_srsgpr(rs
, rt
);
15618 goto pool32axf_invalid
;
15621 #ifndef CONFIG_USER_ONLY
15625 mips32_op
= OPC_TLBP
;
15628 mips32_op
= OPC_TLBR
;
15631 mips32_op
= OPC_TLBWI
;
15634 mips32_op
= OPC_TLBWR
;
15637 mips32_op
= OPC_TLBINV
;
15640 mips32_op
= OPC_TLBINVF
;
15643 mips32_op
= OPC_WAIT
;
15646 mips32_op
= OPC_DERET
;
15649 mips32_op
= OPC_ERET
;
15651 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15654 goto pool32axf_invalid
;
15660 check_cp0_enabled(ctx
);
15662 TCGv t0
= tcg_temp_new();
15664 save_cpu_state(ctx
, 1);
15665 gen_helper_di(t0
, cpu_env
);
15666 gen_store_gpr(t0
, rs
);
15667 /* Stop translation as we may have switched the execution mode */
15668 ctx
->base
.is_jmp
= DISAS_STOP
;
15673 check_cp0_enabled(ctx
);
15675 TCGv t0
= tcg_temp_new();
15677 save_cpu_state(ctx
, 1);
15678 gen_helper_ei(t0
, cpu_env
);
15679 gen_store_gpr(t0
, rs
);
15681 * DISAS_STOP isn't sufficient, we need to ensure we break out
15682 * of translated code to check for pending interrupts.
15684 gen_save_pc(ctx
->base
.pc_next
+ 4);
15685 ctx
->base
.is_jmp
= DISAS_EXIT
;
15690 goto pool32axf_invalid
;
15697 gen_sync(extract32(ctx
->opcode
, 16, 5));
15700 generate_exception_end(ctx
, EXCP_SYSCALL
);
15703 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15704 gen_helper_do_semihosting(cpu_env
);
15706 check_insn(ctx
, ISA_MIPS32
);
15707 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15708 generate_exception_end(ctx
, EXCP_RI
);
15710 generate_exception_end(ctx
, EXCP_DBp
);
15715 goto pool32axf_invalid
;
15719 switch (minor
& 3) {
15721 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15724 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15727 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15730 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15733 goto pool32axf_invalid
;
15737 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15740 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15743 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15746 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15749 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15752 goto pool32axf_invalid
;
15757 MIPS_INVAL("pool32axf");
15758 generate_exception_end(ctx
, EXCP_RI
);
15764 * Values for microMIPS fmt field. Variable-width, depending on which
15765 * formats the instruction supports.
15784 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15786 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15787 uint32_t mips32_op
;
15789 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
15790 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
15791 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
15793 switch (extension
) {
15794 case FLOAT_1BIT_FMT(CFC1
, 0):
15795 mips32_op
= OPC_CFC1
;
15797 case FLOAT_1BIT_FMT(CTC1
, 0):
15798 mips32_op
= OPC_CTC1
;
15800 case FLOAT_1BIT_FMT(MFC1
, 0):
15801 mips32_op
= OPC_MFC1
;
15803 case FLOAT_1BIT_FMT(MTC1
, 0):
15804 mips32_op
= OPC_MTC1
;
15806 case FLOAT_1BIT_FMT(MFHC1
, 0):
15807 mips32_op
= OPC_MFHC1
;
15809 case FLOAT_1BIT_FMT(MTHC1
, 0):
15810 mips32_op
= OPC_MTHC1
;
15812 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15815 /* Reciprocal square root */
15816 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15817 mips32_op
= OPC_RSQRT_S
;
15819 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15820 mips32_op
= OPC_RSQRT_D
;
15824 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15825 mips32_op
= OPC_SQRT_S
;
15827 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15828 mips32_op
= OPC_SQRT_D
;
15832 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15833 mips32_op
= OPC_RECIP_S
;
15835 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15836 mips32_op
= OPC_RECIP_D
;
15840 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15841 mips32_op
= OPC_FLOOR_L_S
;
15843 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15844 mips32_op
= OPC_FLOOR_L_D
;
15846 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15847 mips32_op
= OPC_FLOOR_W_S
;
15849 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15850 mips32_op
= OPC_FLOOR_W_D
;
15854 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15855 mips32_op
= OPC_CEIL_L_S
;
15857 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15858 mips32_op
= OPC_CEIL_L_D
;
15860 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15861 mips32_op
= OPC_CEIL_W_S
;
15863 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15864 mips32_op
= OPC_CEIL_W_D
;
15868 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15869 mips32_op
= OPC_TRUNC_L_S
;
15871 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15872 mips32_op
= OPC_TRUNC_L_D
;
15874 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15875 mips32_op
= OPC_TRUNC_W_S
;
15877 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15878 mips32_op
= OPC_TRUNC_W_D
;
15882 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15883 mips32_op
= OPC_ROUND_L_S
;
15885 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15886 mips32_op
= OPC_ROUND_L_D
;
15888 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15889 mips32_op
= OPC_ROUND_W_S
;
15891 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15892 mips32_op
= OPC_ROUND_W_D
;
15895 /* Integer to floating-point conversion */
15896 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15897 mips32_op
= OPC_CVT_L_S
;
15899 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15900 mips32_op
= OPC_CVT_L_D
;
15902 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15903 mips32_op
= OPC_CVT_W_S
;
15905 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15906 mips32_op
= OPC_CVT_W_D
;
15909 /* Paired-foo conversions */
15910 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15911 mips32_op
= OPC_CVT_S_PL
;
15913 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15914 mips32_op
= OPC_CVT_S_PU
;
15916 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15917 mips32_op
= OPC_CVT_PW_PS
;
15919 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15920 mips32_op
= OPC_CVT_PS_PW
;
15923 /* Floating-point moves */
15924 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15925 mips32_op
= OPC_MOV_S
;
15927 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15928 mips32_op
= OPC_MOV_D
;
15930 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15931 mips32_op
= OPC_MOV_PS
;
15934 /* Absolute value */
15935 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15936 mips32_op
= OPC_ABS_S
;
15938 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15939 mips32_op
= OPC_ABS_D
;
15941 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15942 mips32_op
= OPC_ABS_PS
;
15946 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15947 mips32_op
= OPC_NEG_S
;
15949 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15950 mips32_op
= OPC_NEG_D
;
15952 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15953 mips32_op
= OPC_NEG_PS
;
15956 /* Reciprocal square root step */
15957 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15958 mips32_op
= OPC_RSQRT1_S
;
15960 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15961 mips32_op
= OPC_RSQRT1_D
;
15963 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15964 mips32_op
= OPC_RSQRT1_PS
;
15967 /* Reciprocal step */
15968 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
15969 mips32_op
= OPC_RECIP1_S
;
15971 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
15972 mips32_op
= OPC_RECIP1_S
;
15974 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
15975 mips32_op
= OPC_RECIP1_PS
;
15978 /* Conversions from double */
15979 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
15980 mips32_op
= OPC_CVT_D_S
;
15982 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
15983 mips32_op
= OPC_CVT_D_W
;
15985 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
15986 mips32_op
= OPC_CVT_D_L
;
15989 /* Conversions from single */
15990 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
15991 mips32_op
= OPC_CVT_S_D
;
15993 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
15994 mips32_op
= OPC_CVT_S_W
;
15996 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
15997 mips32_op
= OPC_CVT_S_L
;
15999 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
16002 /* Conditional moves on floating-point codes */
16003 case COND_FLOAT_MOV(MOVT
, 0):
16004 case COND_FLOAT_MOV(MOVT
, 1):
16005 case COND_FLOAT_MOV(MOVT
, 2):
16006 case COND_FLOAT_MOV(MOVT
, 3):
16007 case COND_FLOAT_MOV(MOVT
, 4):
16008 case COND_FLOAT_MOV(MOVT
, 5):
16009 case COND_FLOAT_MOV(MOVT
, 6):
16010 case COND_FLOAT_MOV(MOVT
, 7):
16011 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16012 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
16014 case COND_FLOAT_MOV(MOVF
, 0):
16015 case COND_FLOAT_MOV(MOVF
, 1):
16016 case COND_FLOAT_MOV(MOVF
, 2):
16017 case COND_FLOAT_MOV(MOVF
, 3):
16018 case COND_FLOAT_MOV(MOVF
, 4):
16019 case COND_FLOAT_MOV(MOVF
, 5):
16020 case COND_FLOAT_MOV(MOVF
, 6):
16021 case COND_FLOAT_MOV(MOVF
, 7):
16022 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16023 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
16026 MIPS_INVAL("pool32fxf");
16027 generate_exception_end(ctx
, EXCP_RI
);
16032 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
16036 int rt
, rs
, rd
, rr
;
16038 uint32_t op
, minor
, minor2
, mips32_op
;
16039 uint32_t cond
, fmt
, cc
;
16041 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
16042 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
16044 rt
= (ctx
->opcode
>> 21) & 0x1f;
16045 rs
= (ctx
->opcode
>> 16) & 0x1f;
16046 rd
= (ctx
->opcode
>> 11) & 0x1f;
16047 rr
= (ctx
->opcode
>> 6) & 0x1f;
16048 imm
= (int16_t) ctx
->opcode
;
16050 op
= (ctx
->opcode
>> 26) & 0x3f;
16053 minor
= ctx
->opcode
& 0x3f;
16056 minor
= (ctx
->opcode
>> 6) & 0xf;
16059 mips32_op
= OPC_SLL
;
16062 mips32_op
= OPC_SRA
;
16065 mips32_op
= OPC_SRL
;
16068 mips32_op
= OPC_ROTR
;
16070 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
16073 check_insn(ctx
, ISA_MIPS32R6
);
16074 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
16077 check_insn(ctx
, ISA_MIPS32R6
);
16078 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
16081 check_insn(ctx
, ISA_MIPS32R6
);
16082 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
16085 goto pool32a_invalid
;
16089 minor
= (ctx
->opcode
>> 6) & 0xf;
16093 mips32_op
= OPC_ADD
;
16096 mips32_op
= OPC_ADDU
;
16099 mips32_op
= OPC_SUB
;
16102 mips32_op
= OPC_SUBU
;
16105 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16106 mips32_op
= OPC_MUL
;
16108 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
16112 mips32_op
= OPC_SLLV
;
16115 mips32_op
= OPC_SRLV
;
16118 mips32_op
= OPC_SRAV
;
16121 mips32_op
= OPC_ROTRV
;
16123 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
16125 /* Logical operations */
16127 mips32_op
= OPC_AND
;
16130 mips32_op
= OPC_OR
;
16133 mips32_op
= OPC_NOR
;
16136 mips32_op
= OPC_XOR
;
16138 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
16140 /* Set less than */
16142 mips32_op
= OPC_SLT
;
16145 mips32_op
= OPC_SLTU
;
16147 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
16150 goto pool32a_invalid
;
16154 minor
= (ctx
->opcode
>> 6) & 0xf;
16156 /* Conditional moves */
16157 case MOVN
: /* MUL */
16158 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16160 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
16163 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
16166 case MOVZ
: /* MUH */
16167 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16169 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
16172 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
16176 check_insn(ctx
, ISA_MIPS32R6
);
16177 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
16180 check_insn(ctx
, ISA_MIPS32R6
);
16181 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
16183 case LWXS
: /* DIV */
16184 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16186 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
16189 gen_ldxs(ctx
, rs
, rt
, rd
);
16193 check_insn(ctx
, ISA_MIPS32R6
);
16194 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
16197 check_insn(ctx
, ISA_MIPS32R6
);
16198 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
16201 check_insn(ctx
, ISA_MIPS32R6
);
16202 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
16205 goto pool32a_invalid
;
16209 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
16212 check_insn(ctx
, ISA_MIPS32R6
);
16213 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
16214 extract32(ctx
->opcode
, 9, 2));
16217 check_insn(ctx
, ISA_MIPS32R6
);
16218 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
16221 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
16224 gen_pool32axf(env
, ctx
, rt
, rs
);
16227 generate_exception_end(ctx
, EXCP_BREAK
);
16230 check_insn(ctx
, ISA_MIPS32R6
);
16231 generate_exception_end(ctx
, EXCP_RI
);
16235 MIPS_INVAL("pool32a");
16236 generate_exception_end(ctx
, EXCP_RI
);
16241 minor
= (ctx
->opcode
>> 12) & 0xf;
16244 check_cp0_enabled(ctx
);
16245 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16246 gen_cache_operation(ctx
, rt
, rs
, imm
);
16251 /* COP2: Not implemented. */
16252 generate_exception_err(ctx
, EXCP_CpU
, 2);
16254 #ifdef TARGET_MIPS64
16257 check_insn(ctx
, ISA_MIPS3
);
16258 check_mips_64(ctx
);
16263 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16265 #ifdef TARGET_MIPS64
16268 check_insn(ctx
, ISA_MIPS3
);
16269 check_mips_64(ctx
);
16274 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16277 MIPS_INVAL("pool32b");
16278 generate_exception_end(ctx
, EXCP_RI
);
16283 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16284 minor
= ctx
->opcode
& 0x3f;
16285 check_cp1_enabled(ctx
);
16288 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16289 mips32_op
= OPC_ALNV_PS
;
16292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16293 mips32_op
= OPC_MADD_S
;
16296 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16297 mips32_op
= OPC_MADD_D
;
16300 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16301 mips32_op
= OPC_MADD_PS
;
16304 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16305 mips32_op
= OPC_MSUB_S
;
16308 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16309 mips32_op
= OPC_MSUB_D
;
16312 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16313 mips32_op
= OPC_MSUB_PS
;
16316 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16317 mips32_op
= OPC_NMADD_S
;
16320 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16321 mips32_op
= OPC_NMADD_D
;
16324 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16325 mips32_op
= OPC_NMADD_PS
;
16328 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16329 mips32_op
= OPC_NMSUB_S
;
16332 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16333 mips32_op
= OPC_NMSUB_D
;
16336 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16337 mips32_op
= OPC_NMSUB_PS
;
16339 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16341 case CABS_COND_FMT
:
16342 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16343 cond
= (ctx
->opcode
>> 6) & 0xf;
16344 cc
= (ctx
->opcode
>> 13) & 0x7;
16345 fmt
= (ctx
->opcode
>> 10) & 0x3;
16348 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16351 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16354 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16357 goto pool32f_invalid
;
16361 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16362 cond
= (ctx
->opcode
>> 6) & 0xf;
16363 cc
= (ctx
->opcode
>> 13) & 0x7;
16364 fmt
= (ctx
->opcode
>> 10) & 0x3;
16367 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16370 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16373 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16376 goto pool32f_invalid
;
16380 check_insn(ctx
, ISA_MIPS32R6
);
16381 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16384 check_insn(ctx
, ISA_MIPS32R6
);
16385 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16388 gen_pool32fxf(ctx
, rt
, rs
);
16392 switch ((ctx
->opcode
>> 6) & 0x7) {
16394 mips32_op
= OPC_PLL_PS
;
16397 mips32_op
= OPC_PLU_PS
;
16400 mips32_op
= OPC_PUL_PS
;
16403 mips32_op
= OPC_PUU_PS
;
16406 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16407 mips32_op
= OPC_CVT_PS_S
;
16409 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16412 goto pool32f_invalid
;
16416 check_insn(ctx
, ISA_MIPS32R6
);
16417 switch ((ctx
->opcode
>> 9) & 0x3) {
16419 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16422 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16425 goto pool32f_invalid
;
16430 switch ((ctx
->opcode
>> 6) & 0x7) {
16432 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16433 mips32_op
= OPC_LWXC1
;
16436 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16437 mips32_op
= OPC_SWXC1
;
16440 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16441 mips32_op
= OPC_LDXC1
;
16444 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16445 mips32_op
= OPC_SDXC1
;
16448 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16449 mips32_op
= OPC_LUXC1
;
16452 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16453 mips32_op
= OPC_SUXC1
;
16455 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16458 goto pool32f_invalid
;
16462 check_insn(ctx
, ISA_MIPS32R6
);
16463 switch ((ctx
->opcode
>> 9) & 0x3) {
16465 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16468 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16471 goto pool32f_invalid
;
16476 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16477 fmt
= (ctx
->opcode
>> 9) & 0x3;
16478 switch ((ctx
->opcode
>> 6) & 0x7) {
16482 mips32_op
= OPC_RSQRT2_S
;
16485 mips32_op
= OPC_RSQRT2_D
;
16488 mips32_op
= OPC_RSQRT2_PS
;
16491 goto pool32f_invalid
;
16497 mips32_op
= OPC_RECIP2_S
;
16500 mips32_op
= OPC_RECIP2_D
;
16503 mips32_op
= OPC_RECIP2_PS
;
16506 goto pool32f_invalid
;
16510 mips32_op
= OPC_ADDR_PS
;
16513 mips32_op
= OPC_MULR_PS
;
16515 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16518 goto pool32f_invalid
;
16522 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16523 cc
= (ctx
->opcode
>> 13) & 0x7;
16524 fmt
= (ctx
->opcode
>> 9) & 0x3;
16525 switch ((ctx
->opcode
>> 6) & 0x7) {
16526 case MOVF_FMT
: /* RINT_FMT */
16527 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16531 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16534 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16537 goto pool32f_invalid
;
16543 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16546 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16550 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16553 goto pool32f_invalid
;
16557 case MOVT_FMT
: /* CLASS_FMT */
16558 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16562 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16565 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16568 goto pool32f_invalid
;
16574 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16577 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16581 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16584 goto pool32f_invalid
;
16589 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16592 goto pool32f_invalid
;
16595 #define FINSN_3ARG_SDPS(prfx) \
16596 switch ((ctx->opcode >> 8) & 0x3) { \
16598 mips32_op = OPC_##prfx##_S; \
16601 mips32_op = OPC_##prfx##_D; \
16603 case FMT_SDPS_PS: \
16605 mips32_op = OPC_##prfx##_PS; \
16608 goto pool32f_invalid; \
16611 check_insn(ctx
, ISA_MIPS32R6
);
16612 switch ((ctx
->opcode
>> 9) & 0x3) {
16614 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16617 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16620 goto pool32f_invalid
;
16624 check_insn(ctx
, ISA_MIPS32R6
);
16625 switch ((ctx
->opcode
>> 9) & 0x3) {
16627 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16630 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16633 goto pool32f_invalid
;
16637 /* regular FP ops */
16638 switch ((ctx
->opcode
>> 6) & 0x3) {
16640 FINSN_3ARG_SDPS(ADD
);
16643 FINSN_3ARG_SDPS(SUB
);
16646 FINSN_3ARG_SDPS(MUL
);
16649 fmt
= (ctx
->opcode
>> 8) & 0x3;
16651 mips32_op
= OPC_DIV_D
;
16652 } else if (fmt
== 0) {
16653 mips32_op
= OPC_DIV_S
;
16655 goto pool32f_invalid
;
16659 goto pool32f_invalid
;
16664 switch ((ctx
->opcode
>> 6) & 0x7) {
16665 case MOVN_FMT
: /* SELEQZ_FMT */
16666 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16668 switch ((ctx
->opcode
>> 9) & 0x3) {
16670 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16673 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16676 goto pool32f_invalid
;
16680 FINSN_3ARG_SDPS(MOVN
);
16684 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16685 FINSN_3ARG_SDPS(MOVN
);
16687 case MOVZ_FMT
: /* SELNEZ_FMT */
16688 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16690 switch ((ctx
->opcode
>> 9) & 0x3) {
16692 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16695 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16698 goto pool32f_invalid
;
16702 FINSN_3ARG_SDPS(MOVZ
);
16706 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16707 FINSN_3ARG_SDPS(MOVZ
);
16710 check_insn(ctx
, ISA_MIPS32R6
);
16711 switch ((ctx
->opcode
>> 9) & 0x3) {
16713 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16716 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16719 goto pool32f_invalid
;
16723 check_insn(ctx
, ISA_MIPS32R6
);
16724 switch ((ctx
->opcode
>> 9) & 0x3) {
16726 mips32_op
= OPC_MADDF_S
;
16729 mips32_op
= OPC_MADDF_D
;
16732 goto pool32f_invalid
;
16736 check_insn(ctx
, ISA_MIPS32R6
);
16737 switch ((ctx
->opcode
>> 9) & 0x3) {
16739 mips32_op
= OPC_MSUBF_S
;
16742 mips32_op
= OPC_MSUBF_D
;
16745 goto pool32f_invalid
;
16749 goto pool32f_invalid
;
16753 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16757 MIPS_INVAL("pool32f");
16758 generate_exception_end(ctx
, EXCP_RI
);
16762 generate_exception_err(ctx
, EXCP_CpU
, 1);
16766 minor
= (ctx
->opcode
>> 21) & 0x1f;
16769 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16770 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16773 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16774 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16775 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16778 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16779 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16780 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16783 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16784 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16787 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16788 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16789 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16792 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16793 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16794 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16797 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16798 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16801 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16802 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16806 case TLTI
: /* BC1EQZC */
16807 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16809 check_cp1_enabled(ctx
);
16810 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16813 mips32_op
= OPC_TLTI
;
16817 case TGEI
: /* BC1NEZC */
16818 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16820 check_cp1_enabled(ctx
);
16821 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16824 mips32_op
= OPC_TGEI
;
16829 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16830 mips32_op
= OPC_TLTIU
;
16833 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16834 mips32_op
= OPC_TGEIU
;
16836 case TNEI
: /* SYNCI */
16837 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
16840 * Break the TB to be able to sync copied instructions
16843 ctx
->base
.is_jmp
= DISAS_STOP
;
16846 mips32_op
= OPC_TNEI
;
16851 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16852 mips32_op
= OPC_TEQI
;
16854 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16859 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16860 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16861 4, rs
, 0, imm
<< 1, 0);
16863 * Compact branches don't have a delay slot, so just let
16864 * the normal delay slot handling take us to the branch
16869 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16870 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16873 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16875 * Break the TB to be able to sync copied instructions
16878 ctx
->base
.is_jmp
= DISAS_STOP
;
16882 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16883 /* COP2: Not implemented. */
16884 generate_exception_err(ctx
, EXCP_CpU
, 2);
16887 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16888 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16891 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16892 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16895 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16896 mips32_op
= OPC_BC1FANY4
;
16899 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16900 mips32_op
= OPC_BC1TANY4
;
16903 check_insn(ctx
, ASE_MIPS3D
);
16906 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16907 check_cp1_enabled(ctx
);
16908 gen_compute_branch1(ctx
, mips32_op
,
16909 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16911 generate_exception_err(ctx
, EXCP_CpU
, 1);
16916 /* MIPS DSP: not implemented */
16919 MIPS_INVAL("pool32i");
16920 generate_exception_end(ctx
, EXCP_RI
);
16925 minor
= (ctx
->opcode
>> 12) & 0xf;
16926 offset
= sextract32(ctx
->opcode
, 0,
16927 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
16930 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16931 mips32_op
= OPC_LWL
;
16934 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16935 mips32_op
= OPC_SWL
;
16938 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16939 mips32_op
= OPC_LWR
;
16942 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16943 mips32_op
= OPC_SWR
;
16945 #if defined(TARGET_MIPS64)
16947 check_insn(ctx
, ISA_MIPS3
);
16948 check_mips_64(ctx
);
16949 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16950 mips32_op
= OPC_LDL
;
16953 check_insn(ctx
, ISA_MIPS3
);
16954 check_mips_64(ctx
);
16955 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16956 mips32_op
= OPC_SDL
;
16959 check_insn(ctx
, ISA_MIPS3
);
16960 check_mips_64(ctx
);
16961 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16962 mips32_op
= OPC_LDR
;
16965 check_insn(ctx
, ISA_MIPS3
);
16966 check_mips_64(ctx
);
16967 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
16968 mips32_op
= OPC_SDR
;
16971 check_insn(ctx
, ISA_MIPS3
);
16972 check_mips_64(ctx
);
16973 mips32_op
= OPC_LWU
;
16976 check_insn(ctx
, ISA_MIPS3
);
16977 check_mips_64(ctx
);
16978 mips32_op
= OPC_LLD
;
16982 mips32_op
= OPC_LL
;
16985 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
16988 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
16991 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, false);
16993 #if defined(TARGET_MIPS64)
16995 check_insn(ctx
, ISA_MIPS3
);
16996 check_mips_64(ctx
);
16997 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TEQ
, false);
17002 MIPS_INVAL("pool32c ld-eva");
17003 generate_exception_end(ctx
, EXCP_RI
);
17006 check_cp0_enabled(ctx
);
17008 minor2
= (ctx
->opcode
>> 9) & 0x7;
17009 offset
= sextract32(ctx
->opcode
, 0, 9);
17012 mips32_op
= OPC_LBUE
;
17015 mips32_op
= OPC_LHUE
;
17018 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17019 mips32_op
= OPC_LWLE
;
17022 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17023 mips32_op
= OPC_LWRE
;
17026 mips32_op
= OPC_LBE
;
17029 mips32_op
= OPC_LHE
;
17032 mips32_op
= OPC_LLE
;
17035 mips32_op
= OPC_LWE
;
17041 MIPS_INVAL("pool32c st-eva");
17042 generate_exception_end(ctx
, EXCP_RI
);
17045 check_cp0_enabled(ctx
);
17047 minor2
= (ctx
->opcode
>> 9) & 0x7;
17048 offset
= sextract32(ctx
->opcode
, 0, 9);
17051 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17052 mips32_op
= OPC_SWLE
;
17055 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17056 mips32_op
= OPC_SWRE
;
17059 /* Treat as no-op */
17060 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
17061 /* hint codes 24-31 are reserved and signal RI */
17062 generate_exception(ctx
, EXCP_RI
);
17066 /* Treat as no-op */
17067 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17068 gen_cache_operation(ctx
, rt
, rs
, offset
);
17072 mips32_op
= OPC_SBE
;
17075 mips32_op
= OPC_SHE
;
17078 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, true);
17081 mips32_op
= OPC_SWE
;
17086 /* Treat as no-op */
17087 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
17088 /* hint codes 24-31 are reserved and signal RI */
17089 generate_exception(ctx
, EXCP_RI
);
17093 MIPS_INVAL("pool32c");
17094 generate_exception_end(ctx
, EXCP_RI
);
17098 case ADDI32
: /* AUI, LUI */
17099 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17101 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
17104 mips32_op
= OPC_ADDI
;
17109 mips32_op
= OPC_ADDIU
;
17111 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17114 /* Logical operations */
17116 mips32_op
= OPC_ORI
;
17119 mips32_op
= OPC_XORI
;
17122 mips32_op
= OPC_ANDI
;
17124 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17127 /* Set less than immediate */
17129 mips32_op
= OPC_SLTI
;
17132 mips32_op
= OPC_SLTIU
;
17134 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17137 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17138 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
17139 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
17140 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17142 case JALS32
: /* BOVC, BEQC, BEQZALC */
17143 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17146 mips32_op
= OPC_BOVC
;
17147 } else if (rs
< rt
&& rs
== 0) {
17149 mips32_op
= OPC_BEQZALC
;
17152 mips32_op
= OPC_BEQC
;
17154 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17157 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
17158 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
17159 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17162 case BEQ32
: /* BC */
17163 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17165 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
17166 sextract32(ctx
->opcode
<< 1, 0, 27));
17169 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
17172 case BNE32
: /* BALC */
17173 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17175 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
17176 sextract32(ctx
->opcode
<< 1, 0, 27));
17179 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
17182 case J32
: /* BGTZC, BLTZC, BLTC */
17183 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17184 if (rs
== 0 && rt
!= 0) {
17186 mips32_op
= OPC_BGTZC
;
17187 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17189 mips32_op
= OPC_BLTZC
;
17192 mips32_op
= OPC_BLTC
;
17194 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17197 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
17198 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17201 case JAL32
: /* BLEZC, BGEZC, BGEC */
17202 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17203 if (rs
== 0 && rt
!= 0) {
17205 mips32_op
= OPC_BLEZC
;
17206 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17208 mips32_op
= OPC_BGEZC
;
17211 mips32_op
= OPC_BGEC
;
17213 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17216 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
17217 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17218 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17221 /* Floating point (COP1) */
17223 mips32_op
= OPC_LWC1
;
17226 mips32_op
= OPC_LDC1
;
17229 mips32_op
= OPC_SWC1
;
17232 mips32_op
= OPC_SDC1
;
17234 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
17236 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17237 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17238 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17239 switch ((ctx
->opcode
>> 16) & 0x1f) {
17248 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17251 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17254 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17264 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17267 generate_exception(ctx
, EXCP_RI
);
17272 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17273 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17275 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17278 case BNVC
: /* BNEC, BNEZALC */
17279 check_insn(ctx
, ISA_MIPS32R6
);
17282 mips32_op
= OPC_BNVC
;
17283 } else if (rs
< rt
&& rs
== 0) {
17285 mips32_op
= OPC_BNEZALC
;
17288 mips32_op
= OPC_BNEC
;
17290 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17292 case R6_BNEZC
: /* JIALC */
17293 check_insn(ctx
, ISA_MIPS32R6
);
17296 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17297 sextract32(ctx
->opcode
<< 1, 0, 22));
17300 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17303 case R6_BEQZC
: /* JIC */
17304 check_insn(ctx
, ISA_MIPS32R6
);
17307 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17308 sextract32(ctx
->opcode
<< 1, 0, 22));
17311 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17314 case BLEZALC
: /* BGEZALC, BGEUC */
17315 check_insn(ctx
, ISA_MIPS32R6
);
17316 if (rs
== 0 && rt
!= 0) {
17318 mips32_op
= OPC_BLEZALC
;
17319 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17321 mips32_op
= OPC_BGEZALC
;
17324 mips32_op
= OPC_BGEUC
;
17326 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17328 case BGTZALC
: /* BLTZALC, BLTUC */
17329 check_insn(ctx
, ISA_MIPS32R6
);
17330 if (rs
== 0 && rt
!= 0) {
17332 mips32_op
= OPC_BGTZALC
;
17333 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17335 mips32_op
= OPC_BLTZALC
;
17338 mips32_op
= OPC_BLTUC
;
17340 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17342 /* Loads and stores */
17344 mips32_op
= OPC_LB
;
17347 mips32_op
= OPC_LBU
;
17350 mips32_op
= OPC_LH
;
17353 mips32_op
= OPC_LHU
;
17356 mips32_op
= OPC_LW
;
17358 #ifdef TARGET_MIPS64
17360 check_insn(ctx
, ISA_MIPS3
);
17361 check_mips_64(ctx
);
17362 mips32_op
= OPC_LD
;
17365 check_insn(ctx
, ISA_MIPS3
);
17366 check_mips_64(ctx
);
17367 mips32_op
= OPC_SD
;
17371 mips32_op
= OPC_SB
;
17374 mips32_op
= OPC_SH
;
17377 mips32_op
= OPC_SW
;
17380 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17383 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17386 generate_exception_end(ctx
, EXCP_RI
);
17391 static int decode_micromips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
17395 /* make sure instructions are on a halfword boundary */
17396 if (ctx
->base
.pc_next
& 0x1) {
17397 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
17398 generate_exception_end(ctx
, EXCP_AdEL
);
17402 op
= (ctx
->opcode
>> 10) & 0x3f;
17403 /* Enforce properly-sized instructions in a delay slot */
17404 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
17405 switch (op
& 0x7) { /* MSB-3..MSB-5 */
17407 /* POOL32A, POOL32B, POOL32I, POOL32C */
17409 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
17411 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
17413 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
17415 /* LB32, LH32, LWC132, LDC132, LW32 */
17416 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
17417 generate_exception_end(ctx
, EXCP_RI
);
17422 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
17424 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
17426 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
17427 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
17428 generate_exception_end(ctx
, EXCP_RI
);
17438 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17439 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17440 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17443 switch (ctx
->opcode
& 0x1) {
17451 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17453 * In the Release 6, the register number location in
17454 * the instruction encoding has changed.
17456 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17458 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17464 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17465 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17466 int amount
= (ctx
->opcode
>> 1) & 0x7;
17468 amount
= amount
== 0 ? 8 : amount
;
17470 switch (ctx
->opcode
& 0x1) {
17479 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17483 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17484 gen_pool16c_r6_insn(ctx
);
17486 gen_pool16c_insn(ctx
);
17491 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17492 int rb
= 28; /* GP */
17493 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17495 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17499 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17500 if (ctx
->opcode
& 1) {
17501 generate_exception_end(ctx
, EXCP_RI
);
17504 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17505 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17506 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17507 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17512 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17513 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17514 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17515 offset
= (offset
== 0xf ? -1 : offset
);
17517 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17522 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17523 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17524 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17526 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17531 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17532 int rb
= 29; /* SP */
17533 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17535 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17540 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17541 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17542 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17544 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17549 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17550 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17551 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17553 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17558 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17559 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17560 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17562 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17567 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17568 int rb
= 29; /* SP */
17569 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17571 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17576 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17577 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17578 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17580 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17585 int rd
= uMIPS_RD5(ctx
->opcode
);
17586 int rs
= uMIPS_RS5(ctx
->opcode
);
17588 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17595 switch (ctx
->opcode
& 0x1) {
17605 switch (ctx
->opcode
& 0x1) {
17610 gen_addiur1sp(ctx
);
17614 case B16
: /* BC16 */
17615 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17616 sextract32(ctx
->opcode
, 0, 10) << 1,
17617 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17619 case BNEZ16
: /* BNEZC16 */
17620 case BEQZ16
: /* BEQZC16 */
17621 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17622 mmreg(uMIPS_RD(ctx
->opcode
)),
17623 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17624 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
17629 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17630 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17632 imm
= (imm
== 0x7f ? -1 : imm
);
17633 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17639 generate_exception_end(ctx
, EXCP_RI
);
17642 decode_micromips32_opc(env
, ctx
);
17655 /* MAJOR, P16, and P32 pools opcodes */
17659 NM_MOVE_BALC
= 0x02,
17667 NM_P16_SHIFT
= 0x0c,
17685 NM_P_LS_U12
= 0x21,
17695 NM_P16_ADDU
= 0x2c,
17709 NM_MOVEPREV
= 0x3f,
17712 /* POOL32A instruction pool */
17714 NM_POOL32A0
= 0x00,
17715 NM_SPECIAL2
= 0x01,
17718 NM_POOL32A5
= 0x05,
17719 NM_POOL32A7
= 0x07,
17722 /* P.GP.W instruction pool */
17724 NM_ADDIUGP_W
= 0x00,
17729 /* P48I instruction pool */
17733 NM_ADDIUGP48
= 0x02,
17734 NM_ADDIUPC48
= 0x03,
17739 /* P.U12 instruction pool */
17748 NM_ADDIUNEG
= 0x08,
17755 /* POOL32F instruction pool */
17757 NM_POOL32F_0
= 0x00,
17758 NM_POOL32F_3
= 0x03,
17759 NM_POOL32F_5
= 0x05,
17762 /* POOL32S instruction pool */
17764 NM_POOL32S_0
= 0x00,
17765 NM_POOL32S_4
= 0x04,
17768 /* P.LUI instruction pool */
17774 /* P.GP.BH instruction pool */
17779 NM_ADDIUGP_B
= 0x03,
17782 NM_P_GP_CP1
= 0x06,
17785 /* P.LS.U12 instruction pool */
17790 NM_P_PREFU12
= 0x03,
17803 /* P.LS.S9 instruction pool */
17809 NM_P_LS_UAWM
= 0x05,
17812 /* P.BAL instruction pool */
17818 /* P.J instruction pool */
17821 NM_JALRC_HB
= 0x01,
17822 NM_P_BALRSC
= 0x08,
17825 /* P.BR1 instruction pool */
17833 /* P.BR2 instruction pool */
17840 /* P.BRI instruction pool */
17852 /* P16.SHIFT instruction pool */
17858 /* POOL16C instruction pool */
17860 NM_POOL16C_0
= 0x00,
17864 /* P16.A1 instruction pool */
17866 NM_ADDIUR1SP
= 0x01,
17869 /* P16.A2 instruction pool */
17872 NM_P_ADDIURS5
= 0x01,
17875 /* P16.ADDU instruction pool */
17881 /* P16.SR instruction pool */
17884 NM_RESTORE_JRC16
= 0x01,
17887 /* P16.4X4 instruction pool */
17893 /* P16.LB instruction pool */
17900 /* P16.LH instruction pool */
17907 /* P.RI instruction pool */
17910 NM_P_SYSCALL
= 0x01,
17915 /* POOL32A0 instruction pool */
17950 NM_D_E_MT_VPE
= 0x56,
17958 /* CRC32 instruction pool */
17968 /* POOL32A5 instruction pool */
17970 NM_CMP_EQ_PH
= 0x00,
17971 NM_CMP_LT_PH
= 0x08,
17972 NM_CMP_LE_PH
= 0x10,
17973 NM_CMPGU_EQ_QB
= 0x18,
17974 NM_CMPGU_LT_QB
= 0x20,
17975 NM_CMPGU_LE_QB
= 0x28,
17976 NM_CMPGDU_EQ_QB
= 0x30,
17977 NM_CMPGDU_LT_QB
= 0x38,
17978 NM_CMPGDU_LE_QB
= 0x40,
17979 NM_CMPU_EQ_QB
= 0x48,
17980 NM_CMPU_LT_QB
= 0x50,
17981 NM_CMPU_LE_QB
= 0x58,
17982 NM_ADDQ_S_W
= 0x60,
17983 NM_SUBQ_S_W
= 0x68,
17987 NM_ADDQ_S_PH
= 0x01,
17988 NM_ADDQH_R_PH
= 0x09,
17989 NM_ADDQH_R_W
= 0x11,
17990 NM_ADDU_S_QB
= 0x19,
17991 NM_ADDU_S_PH
= 0x21,
17992 NM_ADDUH_R_QB
= 0x29,
17993 NM_SHRAV_R_PH
= 0x31,
17994 NM_SHRAV_R_QB
= 0x39,
17995 NM_SUBQ_S_PH
= 0x41,
17996 NM_SUBQH_R_PH
= 0x49,
17997 NM_SUBQH_R_W
= 0x51,
17998 NM_SUBU_S_QB
= 0x59,
17999 NM_SUBU_S_PH
= 0x61,
18000 NM_SUBUH_R_QB
= 0x69,
18001 NM_SHLLV_S_PH
= 0x71,
18002 NM_PRECR_SRA_R_PH_W
= 0x79,
18004 NM_MULEU_S_PH_QBL
= 0x12,
18005 NM_MULEU_S_PH_QBR
= 0x1a,
18006 NM_MULQ_RS_PH
= 0x22,
18007 NM_MULQ_S_PH
= 0x2a,
18008 NM_MULQ_RS_W
= 0x32,
18009 NM_MULQ_S_W
= 0x3a,
18012 NM_SHRAV_R_W
= 0x5a,
18013 NM_SHRLV_PH
= 0x62,
18014 NM_SHRLV_QB
= 0x6a,
18015 NM_SHLLV_QB
= 0x72,
18016 NM_SHLLV_S_W
= 0x7a,
18020 NM_MULEQ_S_W_PHL
= 0x04,
18021 NM_MULEQ_S_W_PHR
= 0x0c,
18023 NM_MUL_S_PH
= 0x05,
18024 NM_PRECR_QB_PH
= 0x0d,
18025 NM_PRECRQ_QB_PH
= 0x15,
18026 NM_PRECRQ_PH_W
= 0x1d,
18027 NM_PRECRQ_RS_PH_W
= 0x25,
18028 NM_PRECRQU_S_QB_PH
= 0x2d,
18029 NM_PACKRL_PH
= 0x35,
18033 NM_SHRA_R_W
= 0x5e,
18034 NM_SHRA_R_PH
= 0x66,
18035 NM_SHLL_S_PH
= 0x76,
18036 NM_SHLL_S_W
= 0x7e,
18041 /* POOL32A7 instruction pool */
18046 NM_POOL32AXF
= 0x07,
18049 /* P.SR instruction pool */
18055 /* P.SHIFT instruction pool */
18063 /* P.ROTX instruction pool */
18068 /* P.INS instruction pool */
18073 /* P.EXT instruction pool */
18078 /* POOL32F_0 (fmt) instruction pool */
18083 NM_SELEQZ_S
= 0x07,
18084 NM_SELEQZ_D
= 0x47,
18088 NM_SELNEZ_S
= 0x0f,
18089 NM_SELNEZ_D
= 0x4f,
18104 /* POOL32F_3 instruction pool */
18108 NM_MINA_FMT
= 0x04,
18109 NM_MAXA_FMT
= 0x05,
18110 NM_POOL32FXF
= 0x07,
18113 /* POOL32F_5 instruction pool */
18115 NM_CMP_CONDN_S
= 0x00,
18116 NM_CMP_CONDN_D
= 0x02,
18119 /* P.GP.LH instruction pool */
18125 /* P.GP.SH instruction pool */
18130 /* P.GP.CP1 instruction pool */
18138 /* P.LS.S0 instruction pool */
18155 NM_P_PREFS9
= 0x03,
18161 /* P.LS.S1 instruction pool */
18163 NM_ASET_ACLR
= 0x02,
18171 /* P.LS.E0 instruction pool */
18187 /* P.PREFE instruction pool */
18193 /* P.LLE instruction pool */
18199 /* P.SCE instruction pool */
18205 /* P.LS.WM instruction pool */
18211 /* P.LS.UAWM instruction pool */
18217 /* P.BR3A instruction pool */
18223 NM_BPOSGE32C
= 0x04,
18226 /* P16.RI instruction pool */
18228 NM_P16_SYSCALL
= 0x01,
18233 /* POOL16C_0 instruction pool */
18235 NM_POOL16C_00
= 0x00,
18238 /* P16.JRC instruction pool */
18244 /* P.SYSCALL instruction pool */
18250 /* P.TRAP instruction pool */
18256 /* P.CMOVE instruction pool */
18262 /* POOL32Axf instruction pool */
18264 NM_POOL32AXF_1
= 0x01,
18265 NM_POOL32AXF_2
= 0x02,
18266 NM_POOL32AXF_4
= 0x04,
18267 NM_POOL32AXF_5
= 0x05,
18268 NM_POOL32AXF_7
= 0x07,
18271 /* POOL32Axf_1 instruction pool */
18273 NM_POOL32AXF_1_0
= 0x00,
18274 NM_POOL32AXF_1_1
= 0x01,
18275 NM_POOL32AXF_1_3
= 0x03,
18276 NM_POOL32AXF_1_4
= 0x04,
18277 NM_POOL32AXF_1_5
= 0x05,
18278 NM_POOL32AXF_1_7
= 0x07,
18281 /* POOL32Axf_2 instruction pool */
18283 NM_POOL32AXF_2_0_7
= 0x00,
18284 NM_POOL32AXF_2_8_15
= 0x01,
18285 NM_POOL32AXF_2_16_23
= 0x02,
18286 NM_POOL32AXF_2_24_31
= 0x03,
18289 /* POOL32Axf_7 instruction pool */
18291 NM_SHRA_R_QB
= 0x0,
18296 /* POOL32Axf_1_0 instruction pool */
18304 /* POOL32Axf_1_1 instruction pool */
18310 /* POOL32Axf_1_3 instruction pool */
18318 /* POOL32Axf_1_4 instruction pool */
18324 /* POOL32Axf_1_5 instruction pool */
18326 NM_MAQ_S_W_PHR
= 0x0,
18327 NM_MAQ_S_W_PHL
= 0x1,
18328 NM_MAQ_SA_W_PHR
= 0x2,
18329 NM_MAQ_SA_W_PHL
= 0x3,
18332 /* POOL32Axf_1_7 instruction pool */
18336 NM_EXTR_RS_W
= 0x2,
18340 /* POOL32Axf_2_0_7 instruction pool */
18343 NM_DPAQ_S_W_PH
= 0x1,
18345 NM_DPSQ_S_W_PH
= 0x3,
18352 /* POOL32Axf_2_8_15 instruction pool */
18354 NM_DPAX_W_PH
= 0x0,
18355 NM_DPAQ_SA_L_W
= 0x1,
18356 NM_DPSX_W_PH
= 0x2,
18357 NM_DPSQ_SA_L_W
= 0x3,
18360 NM_EXTRV_R_W
= 0x7,
18363 /* POOL32Axf_2_16_23 instruction pool */
18365 NM_DPAU_H_QBL
= 0x0,
18366 NM_DPAQX_S_W_PH
= 0x1,
18367 NM_DPSU_H_QBL
= 0x2,
18368 NM_DPSQX_S_W_PH
= 0x3,
18371 NM_MULSA_W_PH
= 0x6,
18372 NM_EXTRV_RS_W
= 0x7,
18375 /* POOL32Axf_2_24_31 instruction pool */
18377 NM_DPAU_H_QBR
= 0x0,
18378 NM_DPAQX_SA_W_PH
= 0x1,
18379 NM_DPSU_H_QBR
= 0x2,
18380 NM_DPSQX_SA_W_PH
= 0x3,
18383 NM_MULSAQ_S_W_PH
= 0x6,
18384 NM_EXTRV_S_H
= 0x7,
18387 /* POOL32Axf_{4, 5} instruction pool */
18406 /* nanoMIPS DSP instructions */
18407 NM_ABSQ_S_QB
= 0x00,
18408 NM_ABSQ_S_PH
= 0x08,
18409 NM_ABSQ_S_W
= 0x10,
18410 NM_PRECEQ_W_PHL
= 0x28,
18411 NM_PRECEQ_W_PHR
= 0x30,
18412 NM_PRECEQU_PH_QBL
= 0x38,
18413 NM_PRECEQU_PH_QBR
= 0x48,
18414 NM_PRECEU_PH_QBL
= 0x58,
18415 NM_PRECEU_PH_QBR
= 0x68,
18416 NM_PRECEQU_PH_QBLA
= 0x39,
18417 NM_PRECEQU_PH_QBRA
= 0x49,
18418 NM_PRECEU_PH_QBLA
= 0x59,
18419 NM_PRECEU_PH_QBRA
= 0x69,
18420 NM_REPLV_PH
= 0x01,
18421 NM_REPLV_QB
= 0x09,
18424 NM_RADDU_W_QB
= 0x78,
18430 /* PP.SR instruction pool */
18434 NM_RESTORE_JRC
= 0x03,
18437 /* P.SR.F instruction pool */
18440 NM_RESTOREF
= 0x01,
18443 /* P16.SYSCALL instruction pool */
18445 NM_SYSCALL16
= 0x00,
18446 NM_HYPCALL16
= 0x01,
18449 /* POOL16C_00 instruction pool */
18457 /* PP.LSX and PP.LSXS instruction pool */
18495 /* ERETx instruction pool */
18501 /* POOL32FxF_{0, 1} insturction pool */
18510 NM_CVT_S_PL
= 0x84,
18511 NM_CVT_S_PU
= 0xa4,
18513 NM_CVT_L_S
= 0x004,
18514 NM_CVT_L_D
= 0x104,
18515 NM_CVT_W_S
= 0x024,
18516 NM_CVT_W_D
= 0x124,
18518 NM_RSQRT_S
= 0x008,
18519 NM_RSQRT_D
= 0x108,
18524 NM_RECIP_S
= 0x048,
18525 NM_RECIP_D
= 0x148,
18527 NM_FLOOR_L_S
= 0x00c,
18528 NM_FLOOR_L_D
= 0x10c,
18530 NM_FLOOR_W_S
= 0x02c,
18531 NM_FLOOR_W_D
= 0x12c,
18533 NM_CEIL_L_S
= 0x04c,
18534 NM_CEIL_L_D
= 0x14c,
18535 NM_CEIL_W_S
= 0x06c,
18536 NM_CEIL_W_D
= 0x16c,
18537 NM_TRUNC_L_S
= 0x08c,
18538 NM_TRUNC_L_D
= 0x18c,
18539 NM_TRUNC_W_S
= 0x0ac,
18540 NM_TRUNC_W_D
= 0x1ac,
18541 NM_ROUND_L_S
= 0x0cc,
18542 NM_ROUND_L_D
= 0x1cc,
18543 NM_ROUND_W_S
= 0x0ec,
18544 NM_ROUND_W_D
= 0x1ec,
18552 NM_CVT_D_S
= 0x04d,
18553 NM_CVT_D_W
= 0x0cd,
18554 NM_CVT_D_L
= 0x14d,
18555 NM_CVT_S_D
= 0x06d,
18556 NM_CVT_S_W
= 0x0ed,
18557 NM_CVT_S_L
= 0x16d,
18560 /* P.LL instruction pool */
18566 /* P.SC instruction pool */
18572 /* P.DVP instruction pool */
18581 * nanoMIPS decoding engine
18586 /* extraction utilities */
18588 #define NANOMIPS_EXTRACT_RT3(op) ((op >> 7) & 0x7)
18589 #define NANOMIPS_EXTRACT_RS3(op) ((op >> 4) & 0x7)
18590 #define NANOMIPS_EXTRACT_RD3(op) ((op >> 1) & 0x7)
18591 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
18592 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
18594 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3').
 *
 * The 3-bit encoded register field selects s0-s3 (GPRs 16-19) for
 * encodings 0-3 and a0-a3 (GPRs 4-7) for encodings 4-7.
 */
static inline int decode_gpr_gpr3(int r)
{
    int enc = r & 0x7;

    /* 0..3 -> 16..19, 4..7 -> 4..7 */
    return (enc < 4) ? (enc + 16) : enc;
}
18602 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store').
 *
 * Identical to the 'gpr3' mapping except that encoding 0 selects
 * $zero (GPR 0) instead of s0, as used for store source operands.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    int enc = r & 0x7;

    if (enc == 0) {
        return 0;                   /* $zero instead of s0 */
    }
    return (enc < 4) ? (enc + 16) : enc;
}
18610 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4').
 *
 * The 4-bit encoded register field maps encodings 0-3 to GPRs 8-11,
 * 4-7 to GPRs 4-7, and 8-15 to GPRs 16-23.
 */
static inline int decode_gpr_gpr4(int r)
{
    int enc = r & 0xf;

    /* only 4..7 map to themselves; everything else is offset by 8 */
    return (enc >= 4 && enc < 8) ? enc : (enc + 8);
}
18619 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero').
 *
 * Identical to the 'gpr4' mapping except that encoding 3 selects
 * $zero (GPR 0) instead of GPR 11.
 */
static inline int decode_gpr_gpr4_zero(int r)
{
    int enc = r & 0xf;

    if (enc == 3) {
        return 0;                   /* $zero instead of GPR 11 */
    }
    return (enc >= 4 && enc < 8) ? enc : (enc + 8);
}
/*
 * Emit TCG code to add the (possibly negative) immediate 'u' to the
 * stack pointer (GPR 29), used by the SAVE/RESTORE frame helpers below.
 * NOTE(review): the function's braces were lost in extraction; body is
 * a single gen_op_addr_addi call.
 */
18629 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18631 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
/*
 * Emit TCG code for the nanoMIPS SAVE instruction: store 'count'
 * registers starting at 'rt' to consecutive words below the stack
 * pointer, then decrement SP by 'u'.  When 'gp' is set, the last
 * register stored is GP (28) instead of the next register in sequence.
 * NOTE(review): extraction dropped several original lines here (the
 * 'counter' declaration/increment, the temp frees, braces) — restore
 * them from upstream QEMU before compiling.
 */
18634 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18635 uint8_t gp
, uint16_t u
)
18638 TCGv va
= tcg_temp_new();
18639 TCGv t0
= tcg_temp_new();
/* one store per register in the list */
18641 while (counter
!= count
) {
18642 bool use_gp
= gp
&& (counter
== count
- 1);
/* registers wrap within the 16-register half selected by rt bit 4 */
18643 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
/* stores go downward from SP: -4, -8, ... */
18644 int this_offset
= -((counter
+ 1) << 2);
18645 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18646 gen_load_gpr(t0
, this_rt
);
18647 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18648 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18652 /* adjust stack pointer */
18653 gen_adjust_sp(ctx
, -u
);
/*
 * Emit TCG code for the nanoMIPS RESTORE instruction: the inverse of
 * gen_save() — load 'count' registers from the top of the frame and
 * then increment SP by 'u'.  When 'gp' is set, the last register
 * restored is GP (28).  Loaded values are sign-extended to 32 bits.
 * NOTE(review): extraction dropped several original lines here (the
 * 'counter' declaration/increment, the temp frees, braces) — restore
 * them from upstream QEMU before compiling.
 */
18659 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18660 uint8_t gp
, uint16_t u
)
18663 TCGv va
= tcg_temp_new();
18664 TCGv t0
= tcg_temp_new();
/* one load per register in the list */
18666 while (counter
!= count
) {
18667 bool use_gp
= gp
&& (counter
== count
- 1);
/* registers wrap within the 16-register half selected by rt bit 4 */
18668 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
/* loads come from the top of the frame: u-4, u-8, ... */
18669 int this_offset
= u
- ((counter
+ 1) << 2);
18670 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18671 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18672 ctx
->default_tcg_memop_mask
);
18673 tcg_gen_ext32s_tl(t0
, t0
);
18674 gen_store_gpr(t0
, this_rt
);
18678 /* adjust stack pointer */
18679 gen_adjust_sp(ctx
, u
);
/*
 * Decode and emit the nanoMIPS POOL16C logic group (NOT/AND/XOR/OR,
 * 16-bit forms).  The two opcode bits at position 2 select the
 * operation; rt and rs come from 3-bit gpr3-encoded fields.
 * NOTE(review): the case labels and breaks for the 4-way switch were
 * dropped by extraction — the four gen_logic calls correspond to
 * selector values 0..3 in order (NOT16, AND16, XOR16, OR16).
 */
18685 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18687 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
18688 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
18690 switch (extract32(ctx
->opcode
, 2, 2)) {
/* NOT16: rt = ~rs (NOR with $zero) */
18692 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
/* AND16: rt &= rs */
18695 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
/* XOR16: rt ^= rs */
18698 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
/* OR16: rt |= rs */
18701 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18706 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18708 int rt
= extract32(ctx
->opcode
, 21, 5);
18709 int rs
= extract32(ctx
->opcode
, 16, 5);
18710 int rd
= extract32(ctx
->opcode
, 11, 5);
18712 switch (extract32(ctx
->opcode
, 3, 7)) {
18714 switch (extract32(ctx
->opcode
, 10, 1)) {
18717 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18721 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18727 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18731 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18734 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18737 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18740 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18743 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18746 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18749 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18752 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18756 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18759 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18762 switch (extract32(ctx
->opcode
, 10, 1)) {
18764 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18767 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18772 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18775 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18778 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18781 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18784 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18789 #ifndef CONFIG_USER_ONLY
18790 TCGv t0
= tcg_temp_new();
18791 switch (extract32(ctx
->opcode
, 10, 1)) {
18794 check_cp0_enabled(ctx
);
18795 gen_helper_dvp(t0
, cpu_env
);
18796 gen_store_gpr(t0
, rt
);
18801 check_cp0_enabled(ctx
);
18802 gen_helper_evp(t0
, cpu_env
);
18803 gen_store_gpr(t0
, rt
);
18810 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18815 TCGv t0
= tcg_temp_new();
18816 TCGv t1
= tcg_temp_new();
18817 TCGv t2
= tcg_temp_new();
18819 gen_load_gpr(t1
, rs
);
18820 gen_load_gpr(t2
, rt
);
18821 tcg_gen_add_tl(t0
, t1
, t2
);
18822 tcg_gen_ext32s_tl(t0
, t0
);
18823 tcg_gen_xor_tl(t1
, t1
, t2
);
18824 tcg_gen_xor_tl(t2
, t0
, t2
);
18825 tcg_gen_andc_tl(t1
, t2
, t1
);
18827 /* operands of same sign, result different sign */
18828 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18829 gen_store_gpr(t0
, rd
);
18837 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18840 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18843 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18846 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18849 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18852 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18855 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18858 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18860 #ifndef CONFIG_USER_ONLY
18862 check_cp0_enabled(ctx
);
18864 /* Treat as NOP. */
18867 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18870 check_cp0_enabled(ctx
);
18872 TCGv t0
= tcg_temp_new();
18874 gen_load_gpr(t0
, rt
);
18875 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18879 case NM_D_E_MT_VPE
:
18881 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18882 TCGv t0
= tcg_temp_new();
18889 gen_helper_dmt(t0
);
18890 gen_store_gpr(t0
, rt
);
18891 } else if (rs
== 0) {
18894 gen_helper_dvpe(t0
, cpu_env
);
18895 gen_store_gpr(t0
, rt
);
18897 generate_exception_end(ctx
, EXCP_RI
);
18904 gen_helper_emt(t0
);
18905 gen_store_gpr(t0
, rt
);
18906 } else if (rs
== 0) {
18909 gen_helper_evpe(t0
, cpu_env
);
18910 gen_store_gpr(t0
, rt
);
18912 generate_exception_end(ctx
, EXCP_RI
);
18923 TCGv t0
= tcg_temp_new();
18924 TCGv t1
= tcg_temp_new();
18926 gen_load_gpr(t0
, rt
);
18927 gen_load_gpr(t1
, rs
);
18928 gen_helper_fork(t0
, t1
);
18935 check_cp0_enabled(ctx
);
18937 /* Treat as NOP. */
18940 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18941 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18945 check_cp0_enabled(ctx
);
18946 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18947 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18952 TCGv t0
= tcg_temp_new();
18954 gen_load_gpr(t0
, rs
);
18955 gen_helper_yield(t0
, cpu_env
, t0
);
18956 gen_store_gpr(t0
, rt
);
18962 generate_exception_end(ctx
, EXCP_RI
);
/*
 * Decode and emit the nanoMIPS POOL32Axf_1_5 DSP group: the four
 * MAQ* multiply-accumulate-with-saturation helpers (MAQ_S/MAQ_SA on
 * the left/right PH halves).  't0' carries the accumulator index
 * (v2 >> 3) into the helper; v0_t is loaded from 'ret' and v1_t from
 * 'v1'.  Unknown opc values raise a Reserved Instruction exception.
 * NOTE(review): extraction dropped the variable declarations,
 * "switch (opc) {", breaks and braces — restore from upstream QEMU.
 */
18968 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
18969 int ret
, int v1
, int v2
)
18975 t0
= tcg_temp_new_i32();
18977 v0_t
= tcg_temp_new();
18978 v1_t
= tcg_temp_new();
/* accumulator selector for the DSP helpers */
18980 tcg_gen_movi_i32(t0
, v2
>> 3);
18982 gen_load_gpr(v0_t
, ret
);
18983 gen_load_gpr(v1_t
, v1
);
18986 case NM_MAQ_S_W_PHR
:
18988 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18990 case NM_MAQ_S_W_PHL
:
18992 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
18994 case NM_MAQ_SA_W_PHR
:
18996 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
18998 case NM_MAQ_SA_W_PHL
:
19000 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
/* default: unrecognised sub-opcode -> Reserved Instruction */
19003 generate_exception_end(ctx
, EXCP_RI
);
19007 tcg_temp_free_i32(t0
);
19009 tcg_temp_free(v0_t
);
19010 tcg_temp_free(v1_t
);
19014 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19015 int ret
, int v1
, int v2
)
19018 TCGv t0
= tcg_temp_new();
19019 TCGv t1
= tcg_temp_new();
19020 TCGv v0_t
= tcg_temp_new();
19022 gen_load_gpr(v0_t
, v1
);
19025 case NM_POOL32AXF_1_0
:
19027 switch (extract32(ctx
->opcode
, 12, 2)) {
19029 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
19032 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
19035 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
19038 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
19042 case NM_POOL32AXF_1_1
:
19044 switch (extract32(ctx
->opcode
, 12, 2)) {
19046 tcg_gen_movi_tl(t0
, v2
);
19047 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
19050 tcg_gen_movi_tl(t0
, v2
>> 3);
19051 gen_helper_shilo(t0
, v0_t
, cpu_env
);
19054 generate_exception_end(ctx
, EXCP_RI
);
19058 case NM_POOL32AXF_1_3
:
19060 imm
= extract32(ctx
->opcode
, 14, 7);
19061 switch (extract32(ctx
->opcode
, 12, 2)) {
19063 tcg_gen_movi_tl(t0
, imm
);
19064 gen_helper_rddsp(t0
, t0
, cpu_env
);
19065 gen_store_gpr(t0
, ret
);
19068 gen_load_gpr(t0
, ret
);
19069 tcg_gen_movi_tl(t1
, imm
);
19070 gen_helper_wrdsp(t0
, t1
, cpu_env
);
19073 tcg_gen_movi_tl(t0
, v2
>> 3);
19074 tcg_gen_movi_tl(t1
, v1
);
19075 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
19076 gen_store_gpr(t0
, ret
);
19079 tcg_gen_movi_tl(t0
, v2
>> 3);
19080 tcg_gen_movi_tl(t1
, v1
);
19081 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
19082 gen_store_gpr(t0
, ret
);
19086 case NM_POOL32AXF_1_4
:
19088 tcg_gen_movi_tl(t0
, v2
>> 2);
19089 switch (extract32(ctx
->opcode
, 12, 1)) {
19091 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
19092 gen_store_gpr(t0
, ret
);
19095 gen_helper_shrl_qb(t0
, t0
, v0_t
);
19096 gen_store_gpr(t0
, ret
);
19100 case NM_POOL32AXF_1_5
:
19101 opc
= extract32(ctx
->opcode
, 12, 2);
19102 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
19104 case NM_POOL32AXF_1_7
:
19106 tcg_gen_movi_tl(t0
, v2
>> 3);
19107 tcg_gen_movi_tl(t1
, v1
);
19108 switch (extract32(ctx
->opcode
, 12, 2)) {
19110 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
19111 gen_store_gpr(t0
, ret
);
19114 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
19115 gen_store_gpr(t0
, ret
);
19118 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
19119 gen_store_gpr(t0
, ret
);
19122 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
19123 gen_store_gpr(t0
, ret
);
19128 generate_exception_end(ctx
, EXCP_RI
);
19134 tcg_temp_free(v0_t
);
19137 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
19138 TCGv v0
, TCGv v1
, int rd
)
19142 t0
= tcg_temp_new_i32();
19144 tcg_gen_movi_i32(t0
, rd
>> 3);
19147 case NM_POOL32AXF_2_0_7
:
19148 switch (extract32(ctx
->opcode
, 9, 3)) {
19151 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
19153 case NM_DPAQ_S_W_PH
:
19155 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19159 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
19161 case NM_DPSQ_S_W_PH
:
19163 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19166 generate_exception_end(ctx
, EXCP_RI
);
19170 case NM_POOL32AXF_2_8_15
:
19171 switch (extract32(ctx
->opcode
, 9, 3)) {
19174 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
19176 case NM_DPAQ_SA_L_W
:
19178 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19182 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
19184 case NM_DPSQ_SA_L_W
:
19186 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19189 generate_exception_end(ctx
, EXCP_RI
);
19193 case NM_POOL32AXF_2_16_23
:
19194 switch (extract32(ctx
->opcode
, 9, 3)) {
19195 case NM_DPAU_H_QBL
:
19197 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
19199 case NM_DPAQX_S_W_PH
:
19201 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19203 case NM_DPSU_H_QBL
:
19205 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
19207 case NM_DPSQX_S_W_PH
:
19209 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19211 case NM_MULSA_W_PH
:
19213 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
19216 generate_exception_end(ctx
, EXCP_RI
);
19220 case NM_POOL32AXF_2_24_31
:
19221 switch (extract32(ctx
->opcode
, 9, 3)) {
19222 case NM_DPAU_H_QBR
:
19224 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
19226 case NM_DPAQX_SA_W_PH
:
19228 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19230 case NM_DPSU_H_QBR
:
19232 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
19234 case NM_DPSQX_SA_W_PH
:
19236 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19238 case NM_MULSAQ_S_W_PH
:
19240 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19243 generate_exception_end(ctx
, EXCP_RI
);
19248 generate_exception_end(ctx
, EXCP_RI
);
19252 tcg_temp_free_i32(t0
);
19255 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19256 int rt
, int rs
, int rd
)
19259 TCGv t0
= tcg_temp_new();
19260 TCGv t1
= tcg_temp_new();
19261 TCGv v0_t
= tcg_temp_new();
19262 TCGv v1_t
= tcg_temp_new();
19264 gen_load_gpr(v0_t
, rt
);
19265 gen_load_gpr(v1_t
, rs
);
19268 case NM_POOL32AXF_2_0_7
:
19269 switch (extract32(ctx
->opcode
, 9, 3)) {
19271 case NM_DPAQ_S_W_PH
:
19273 case NM_DPSQ_S_W_PH
:
19274 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19279 gen_load_gpr(t0
, rs
);
19281 if (rd
!= 0 && rd
!= 2) {
19282 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19283 tcg_gen_ext32u_tl(t0
, t0
);
19284 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19285 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19287 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19293 int acc
= extract32(ctx
->opcode
, 14, 2);
19294 TCGv_i64 t2
= tcg_temp_new_i64();
19295 TCGv_i64 t3
= tcg_temp_new_i64();
19297 gen_load_gpr(t0
, rt
);
19298 gen_load_gpr(t1
, rs
);
19299 tcg_gen_ext_tl_i64(t2
, t0
);
19300 tcg_gen_ext_tl_i64(t3
, t1
);
19301 tcg_gen_mul_i64(t2
, t2
, t3
);
19302 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19303 tcg_gen_add_i64(t2
, t2
, t3
);
19304 tcg_temp_free_i64(t3
);
19305 gen_move_low32(cpu_LO
[acc
], t2
);
19306 gen_move_high32(cpu_HI
[acc
], t2
);
19307 tcg_temp_free_i64(t2
);
19313 int acc
= extract32(ctx
->opcode
, 14, 2);
19314 TCGv_i32 t2
= tcg_temp_new_i32();
19315 TCGv_i32 t3
= tcg_temp_new_i32();
19317 gen_load_gpr(t0
, rs
);
19318 gen_load_gpr(t1
, rt
);
19319 tcg_gen_trunc_tl_i32(t2
, t0
);
19320 tcg_gen_trunc_tl_i32(t3
, t1
);
19321 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19322 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19323 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19324 tcg_temp_free_i32(t2
);
19325 tcg_temp_free_i32(t3
);
19330 gen_load_gpr(v1_t
, rs
);
19331 tcg_gen_movi_tl(t0
, rd
>> 3);
19332 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19333 gen_store_gpr(t0
, ret
);
19337 case NM_POOL32AXF_2_8_15
:
19338 switch (extract32(ctx
->opcode
, 9, 3)) {
19340 case NM_DPAQ_SA_L_W
:
19342 case NM_DPSQ_SA_L_W
:
19343 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19348 int acc
= extract32(ctx
->opcode
, 14, 2);
19349 TCGv_i64 t2
= tcg_temp_new_i64();
19350 TCGv_i64 t3
= tcg_temp_new_i64();
19352 gen_load_gpr(t0
, rs
);
19353 gen_load_gpr(t1
, rt
);
19354 tcg_gen_ext32u_tl(t0
, t0
);
19355 tcg_gen_ext32u_tl(t1
, t1
);
19356 tcg_gen_extu_tl_i64(t2
, t0
);
19357 tcg_gen_extu_tl_i64(t3
, t1
);
19358 tcg_gen_mul_i64(t2
, t2
, t3
);
19359 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19360 tcg_gen_add_i64(t2
, t2
, t3
);
19361 tcg_temp_free_i64(t3
);
19362 gen_move_low32(cpu_LO
[acc
], t2
);
19363 gen_move_high32(cpu_HI
[acc
], t2
);
19364 tcg_temp_free_i64(t2
);
19370 int acc
= extract32(ctx
->opcode
, 14, 2);
19371 TCGv_i32 t2
= tcg_temp_new_i32();
19372 TCGv_i32 t3
= tcg_temp_new_i32();
19374 gen_load_gpr(t0
, rs
);
19375 gen_load_gpr(t1
, rt
);
19376 tcg_gen_trunc_tl_i32(t2
, t0
);
19377 tcg_gen_trunc_tl_i32(t3
, t1
);
19378 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19379 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19380 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19381 tcg_temp_free_i32(t2
);
19382 tcg_temp_free_i32(t3
);
19387 tcg_gen_movi_tl(t0
, rd
>> 3);
19388 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19389 gen_store_gpr(t0
, ret
);
19392 generate_exception_end(ctx
, EXCP_RI
);
19396 case NM_POOL32AXF_2_16_23
:
19397 switch (extract32(ctx
->opcode
, 9, 3)) {
19398 case NM_DPAU_H_QBL
:
19399 case NM_DPAQX_S_W_PH
:
19400 case NM_DPSU_H_QBL
:
19401 case NM_DPSQX_S_W_PH
:
19402 case NM_MULSA_W_PH
:
19403 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19407 tcg_gen_movi_tl(t0
, rd
>> 3);
19408 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19409 gen_store_gpr(t0
, ret
);
19414 int acc
= extract32(ctx
->opcode
, 14, 2);
19415 TCGv_i64 t2
= tcg_temp_new_i64();
19416 TCGv_i64 t3
= tcg_temp_new_i64();
19418 gen_load_gpr(t0
, rs
);
19419 gen_load_gpr(t1
, rt
);
19420 tcg_gen_ext_tl_i64(t2
, t0
);
19421 tcg_gen_ext_tl_i64(t3
, t1
);
19422 tcg_gen_mul_i64(t2
, t2
, t3
);
19423 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19424 tcg_gen_sub_i64(t2
, t3
, t2
);
19425 tcg_temp_free_i64(t3
);
19426 gen_move_low32(cpu_LO
[acc
], t2
);
19427 gen_move_high32(cpu_HI
[acc
], t2
);
19428 tcg_temp_free_i64(t2
);
19431 case NM_EXTRV_RS_W
:
19433 tcg_gen_movi_tl(t0
, rd
>> 3);
19434 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19435 gen_store_gpr(t0
, ret
);
19439 case NM_POOL32AXF_2_24_31
:
19440 switch (extract32(ctx
->opcode
, 9, 3)) {
19441 case NM_DPAU_H_QBR
:
19442 case NM_DPAQX_SA_W_PH
:
19443 case NM_DPSU_H_QBR
:
19444 case NM_DPSQX_SA_W_PH
:
19445 case NM_MULSAQ_S_W_PH
:
19446 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19450 tcg_gen_movi_tl(t0
, rd
>> 3);
19451 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19452 gen_store_gpr(t0
, ret
);
19457 int acc
= extract32(ctx
->opcode
, 14, 2);
19458 TCGv_i64 t2
= tcg_temp_new_i64();
19459 TCGv_i64 t3
= tcg_temp_new_i64();
19461 gen_load_gpr(t0
, rs
);
19462 gen_load_gpr(t1
, rt
);
19463 tcg_gen_ext32u_tl(t0
, t0
);
19464 tcg_gen_ext32u_tl(t1
, t1
);
19465 tcg_gen_extu_tl_i64(t2
, t0
);
19466 tcg_gen_extu_tl_i64(t3
, t1
);
19467 tcg_gen_mul_i64(t2
, t2
, t3
);
19468 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19469 tcg_gen_sub_i64(t2
, t3
, t2
);
19470 tcg_temp_free_i64(t3
);
19471 gen_move_low32(cpu_LO
[acc
], t2
);
19472 gen_move_high32(cpu_HI
[acc
], t2
);
19473 tcg_temp_free_i64(t2
);
19478 tcg_gen_movi_tl(t0
, rd
>> 3);
19479 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19480 gen_store_gpr(t0
, ret
);
19485 generate_exception_end(ctx
, EXCP_RI
);
19492 tcg_temp_free(v0_t
);
19493 tcg_temp_free(v1_t
);
19496 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19500 TCGv t0
= tcg_temp_new();
19501 TCGv v0_t
= tcg_temp_new();
19503 gen_load_gpr(v0_t
, rs
);
19508 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19509 gen_store_gpr(v0_t
, ret
);
19513 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19514 gen_store_gpr(v0_t
, ret
);
19518 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19519 gen_store_gpr(v0_t
, ret
);
19521 case NM_PRECEQ_W_PHL
:
19523 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19524 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19525 gen_store_gpr(v0_t
, ret
);
19527 case NM_PRECEQ_W_PHR
:
19529 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19530 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19531 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19532 gen_store_gpr(v0_t
, ret
);
19534 case NM_PRECEQU_PH_QBL
:
19536 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19537 gen_store_gpr(v0_t
, ret
);
19539 case NM_PRECEQU_PH_QBR
:
19541 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19542 gen_store_gpr(v0_t
, ret
);
19544 case NM_PRECEQU_PH_QBLA
:
19546 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19547 gen_store_gpr(v0_t
, ret
);
19549 case NM_PRECEQU_PH_QBRA
:
19551 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19552 gen_store_gpr(v0_t
, ret
);
19554 case NM_PRECEU_PH_QBL
:
19556 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19557 gen_store_gpr(v0_t
, ret
);
19559 case NM_PRECEU_PH_QBR
:
19561 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19562 gen_store_gpr(v0_t
, ret
);
19564 case NM_PRECEU_PH_QBLA
:
19566 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19567 gen_store_gpr(v0_t
, ret
);
19569 case NM_PRECEU_PH_QBRA
:
19571 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19572 gen_store_gpr(v0_t
, ret
);
19576 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19577 tcg_gen_shli_tl(t0
, v0_t
, 16);
19578 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19579 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19580 gen_store_gpr(v0_t
, ret
);
19584 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19585 tcg_gen_shli_tl(t0
, v0_t
, 8);
19586 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19587 tcg_gen_shli_tl(t0
, v0_t
, 16);
19588 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19589 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19590 gen_store_gpr(v0_t
, ret
);
19594 gen_helper_bitrev(v0_t
, v0_t
);
19595 gen_store_gpr(v0_t
, ret
);
19600 TCGv tv0
= tcg_temp_new();
19602 gen_load_gpr(tv0
, rt
);
19603 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19604 gen_store_gpr(v0_t
, ret
);
19605 tcg_temp_free(tv0
);
19608 case NM_RADDU_W_QB
:
19610 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19611 gen_store_gpr(v0_t
, ret
);
19614 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19618 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19622 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19625 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19628 generate_exception_end(ctx
, EXCP_RI
);
19632 tcg_temp_free(v0_t
);
19636 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19637 int rt
, int rs
, int rd
)
19639 TCGv t0
= tcg_temp_new();
19640 TCGv rs_t
= tcg_temp_new();
19642 gen_load_gpr(rs_t
, rs
);
19647 tcg_gen_movi_tl(t0
, rd
>> 2);
19648 switch (extract32(ctx
->opcode
, 12, 1)) {
19651 gen_helper_shra_qb(t0
, t0
, rs_t
);
19652 gen_store_gpr(t0
, rt
);
19656 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19657 gen_store_gpr(t0
, rt
);
19663 tcg_gen_movi_tl(t0
, rd
>> 1);
19664 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19665 gen_store_gpr(t0
, rt
);
19671 target_long result
;
19672 imm
= extract32(ctx
->opcode
, 13, 8);
19673 result
= (uint32_t)imm
<< 24 |
19674 (uint32_t)imm
<< 16 |
19675 (uint32_t)imm
<< 8 |
19677 result
= (int32_t)result
;
19678 tcg_gen_movi_tl(t0
, result
);
19679 gen_store_gpr(t0
, rt
);
19683 generate_exception_end(ctx
, EXCP_RI
);
19687 tcg_temp_free(rs_t
);
19691 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19693 int rt
= extract32(ctx
->opcode
, 21, 5);
19694 int rs
= extract32(ctx
->opcode
, 16, 5);
19695 int rd
= extract32(ctx
->opcode
, 11, 5);
19697 switch (extract32(ctx
->opcode
, 6, 3)) {
19698 case NM_POOL32AXF_1
:
19700 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19701 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19704 case NM_POOL32AXF_2
:
19706 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19707 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19710 case NM_POOL32AXF_4
:
19712 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19713 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19716 case NM_POOL32AXF_5
:
19717 switch (extract32(ctx
->opcode
, 9, 7)) {
19718 #ifndef CONFIG_USER_ONLY
19720 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19723 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19726 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19729 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19732 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19735 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19738 check_cp0_enabled(ctx
);
19740 TCGv t0
= tcg_temp_new();
19742 save_cpu_state(ctx
, 1);
19743 gen_helper_di(t0
, cpu_env
);
19744 gen_store_gpr(t0
, rt
);
19745 /* Stop translation as we may have switched the execution mode */
19746 ctx
->base
.is_jmp
= DISAS_STOP
;
19751 check_cp0_enabled(ctx
);
19753 TCGv t0
= tcg_temp_new();
19755 save_cpu_state(ctx
, 1);
19756 gen_helper_ei(t0
, cpu_env
);
19757 gen_store_gpr(t0
, rt
);
19758 /* Stop translation as we may have switched the execution mode */
19759 ctx
->base
.is_jmp
= DISAS_STOP
;
19764 gen_load_srsgpr(rs
, rt
);
19767 gen_store_srsgpr(rs
, rt
);
19770 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19773 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19776 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19780 generate_exception_end(ctx
, EXCP_RI
);
19784 case NM_POOL32AXF_7
:
19786 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19787 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19791 generate_exception_end(ctx
, EXCP_RI
);
19796 /* Immediate Value Compact Branches */
19797 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19798 int rt
, int32_t imm
, int32_t offset
)
19801 int bcond_compute
= 0;
19802 TCGv t0
= tcg_temp_new();
19803 TCGv t1
= tcg_temp_new();
19805 gen_load_gpr(t0
, rt
);
19806 tcg_gen_movi_tl(t1
, imm
);
19807 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19809 /* Load needed operands and calculate btarget */
19812 if (rt
== 0 && imm
== 0) {
19813 /* Unconditional branch */
19814 } else if (rt
== 0 && imm
!= 0) {
19819 cond
= TCG_COND_EQ
;
19825 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19826 generate_exception_end(ctx
, EXCP_RI
);
19828 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19829 /* Unconditional branch */
19830 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19834 tcg_gen_shri_tl(t0
, t0
, imm
);
19835 tcg_gen_andi_tl(t0
, t0
, 1);
19836 tcg_gen_movi_tl(t1
, 0);
19838 if (opc
== NM_BBEQZC
) {
19839 cond
= TCG_COND_EQ
;
19841 cond
= TCG_COND_NE
;
19846 if (rt
== 0 && imm
== 0) {
19849 } else if (rt
== 0 && imm
!= 0) {
19850 /* Unconditional branch */
19853 cond
= TCG_COND_NE
;
19857 if (rt
== 0 && imm
== 0) {
19858 /* Unconditional branch */
19861 cond
= TCG_COND_GE
;
19866 cond
= TCG_COND_LT
;
19869 if (rt
== 0 && imm
== 0) {
19870 /* Unconditional branch */
19873 cond
= TCG_COND_GEU
;
19878 cond
= TCG_COND_LTU
;
19881 MIPS_INVAL("Immediate Value Compact branch");
19882 generate_exception_end(ctx
, EXCP_RI
);
19886 /* branch completion */
19887 clear_branch_hflags(ctx
);
19888 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19890 if (bcond_compute
== 0) {
19891 /* Uncoditional compact branch */
19892 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19894 /* Conditional compact branch */
19895 TCGLabel
*fs
= gen_new_label();
19897 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19899 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19902 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19910 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19911 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19914 TCGv t0
= tcg_temp_new();
19915 TCGv t1
= tcg_temp_new();
19918 gen_load_gpr(t0
, rs
);
19922 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19925 /* calculate btarget */
19926 tcg_gen_shli_tl(t0
, t0
, 1);
19927 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19928 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19930 /* branch completion */
19931 clear_branch_hflags(ctx
);
19932 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19934 /* unconditional branch to register */
19935 tcg_gen_mov_tl(cpu_PC
, btarget
);
19936 tcg_gen_lookup_and_goto_ptr();
19942 /* nanoMIPS Branches */
19943 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19944 int rs
, int rt
, int32_t offset
)
19946 int bcond_compute
= 0;
19947 TCGv t0
= tcg_temp_new();
19948 TCGv t1
= tcg_temp_new();
19950 /* Load needed operands and calculate btarget */
19952 /* compact branch */
19955 gen_load_gpr(t0
, rs
);
19956 gen_load_gpr(t1
, rt
);
19958 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19962 if (rs
== 0 || rs
== rt
) {
19963 /* OPC_BLEZALC, OPC_BGEZALC */
19964 /* OPC_BGTZALC, OPC_BLTZALC */
19965 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19967 gen_load_gpr(t0
, rs
);
19968 gen_load_gpr(t1
, rt
);
19970 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19973 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19977 /* OPC_BEQZC, OPC_BNEZC */
19978 gen_load_gpr(t0
, rs
);
19980 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19982 /* OPC_JIC, OPC_JIALC */
19983 TCGv tbase
= tcg_temp_new();
19984 TCGv toffset
= tcg_temp_new();
19986 gen_load_gpr(tbase
, rt
);
19987 tcg_gen_movi_tl(toffset
, offset
);
19988 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
19989 tcg_temp_free(tbase
);
19990 tcg_temp_free(toffset
);
19994 MIPS_INVAL("Compact branch/jump");
19995 generate_exception_end(ctx
, EXCP_RI
);
19999 if (bcond_compute
== 0) {
20000 /* Uncoditional compact branch */
20003 gen_goto_tb(ctx
, 0, ctx
->btarget
);
20006 MIPS_INVAL("Compact branch/jump");
20007 generate_exception_end(ctx
, EXCP_RI
);
20011 /* Conditional compact branch */
20012 TCGLabel
*fs
= gen_new_label();
20016 if (rs
== 0 && rt
!= 0) {
20018 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
20019 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20021 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
20024 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
20028 if (rs
== 0 && rt
!= 0) {
20030 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
20031 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20033 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
20036 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
20040 if (rs
== 0 && rt
!= 0) {
20042 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
20043 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20045 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
20048 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
20052 if (rs
== 0 && rt
!= 0) {
20054 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
20055 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20057 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
20060 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
20064 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
20067 MIPS_INVAL("Compact conditional branch/jump");
20068 generate_exception_end(ctx
, EXCP_RI
);
20072 /* branch completion */
20073 clear_branch_hflags(ctx
);
20074 ctx
->base
.is_jmp
= DISAS_NORETURN
;
20076 /* Generating branch here as compact branches don't have delay slot */
20077 gen_goto_tb(ctx
, 1, ctx
->btarget
);
20080 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
20089 /* nanoMIPS CP1 Branches */
20090 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
20091 int32_t ft
, int32_t offset
)
20093 target_ulong btarget
;
20094 TCGv_i64 t0
= tcg_temp_new_i64();
20096 gen_load_fpr64(ctx
, t0
, ft
);
20097 tcg_gen_andi_i64(t0
, t0
, 1);
20099 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20103 tcg_gen_xori_i64(t0
, t0
, 1);
20104 ctx
->hflags
|= MIPS_HFLAG_BC
;
20107 /* t0 already set */
20108 ctx
->hflags
|= MIPS_HFLAG_BC
;
20111 MIPS_INVAL("cp1 cond branch");
20112 generate_exception_end(ctx
, EXCP_RI
);
20116 tcg_gen_trunc_i64_tl(bcond
, t0
);
20118 ctx
->btarget
= btarget
;
20121 tcg_temp_free_i64(t0
);
20125 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
20128 t0
= tcg_temp_new();
20129 t1
= tcg_temp_new();
20131 gen_load_gpr(t0
, rs
);
20132 gen_load_gpr(t1
, rt
);
20134 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
20135 /* PP.LSXS instructions require shifting */
20136 switch (extract32(ctx
->opcode
, 7, 4)) {
20141 tcg_gen_shli_tl(t0
, t0
, 1);
20148 tcg_gen_shli_tl(t0
, t0
, 2);
20152 tcg_gen_shli_tl(t0
, t0
, 3);
20156 gen_op_addr_add(ctx
, t0
, t0
, t1
);
20158 switch (extract32(ctx
->opcode
, 7, 4)) {
20160 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20162 gen_store_gpr(t0
, rd
);
20166 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20168 gen_store_gpr(t0
, rd
);
20172 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20174 gen_store_gpr(t0
, rd
);
20177 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20179 gen_store_gpr(t0
, rd
);
20183 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20185 gen_store_gpr(t0
, rd
);
20189 gen_load_gpr(t1
, rd
);
20190 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20196 gen_load_gpr(t1
, rd
);
20197 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20203 gen_load_gpr(t1
, rd
);
20204 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20208 /*case NM_LWC1XS:*/
20210 /*case NM_LDC1XS:*/
20212 /*case NM_SWC1XS:*/
20214 /*case NM_SDC1XS:*/
20215 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20216 check_cp1_enabled(ctx
);
20217 switch (extract32(ctx
->opcode
, 7, 4)) {
20219 /*case NM_LWC1XS:*/
20220 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
20223 /*case NM_LDC1XS:*/
20224 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
20227 /*case NM_SWC1XS:*/
20228 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
20231 /*case NM_SDC1XS:*/
20232 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
20236 generate_exception_err(ctx
, EXCP_CpU
, 1);
20240 generate_exception_end(ctx
, EXCP_RI
);
20248 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20252 rt
= extract32(ctx
->opcode
, 21, 5);
20253 rs
= extract32(ctx
->opcode
, 16, 5);
20254 rd
= extract32(ctx
->opcode
, 11, 5);
20256 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20257 generate_exception_end(ctx
, EXCP_RI
);
20260 check_cp1_enabled(ctx
);
20261 switch (extract32(ctx
->opcode
, 0, 3)) {
20263 switch (extract32(ctx
->opcode
, 3, 7)) {
20265 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20268 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20271 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20274 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20277 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20280 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20283 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20286 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20289 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20292 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20295 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20298 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20301 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20304 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20307 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20310 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20313 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20316 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20319 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20322 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20325 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20328 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20331 generate_exception_end(ctx
, EXCP_RI
);
20336 switch (extract32(ctx
->opcode
, 3, 3)) {
20338 switch (extract32(ctx
->opcode
, 9, 1)) {
20340 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20343 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20348 switch (extract32(ctx
->opcode
, 9, 1)) {
20350 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20353 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20358 switch (extract32(ctx
->opcode
, 9, 1)) {
20360 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20363 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20368 switch (extract32(ctx
->opcode
, 9, 1)) {
20370 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20373 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
20378 switch (extract32(ctx
->opcode
, 6, 8)) {
20380 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20383 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20386 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20389 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20392 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20395 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20398 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20401 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
20404 switch (extract32(ctx
->opcode
, 6, 9)) {
20406 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20409 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20412 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20415 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20418 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20421 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20424 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20427 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20430 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20433 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20436 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20439 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20442 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20445 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20448 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20451 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20454 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20457 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20460 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20463 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20466 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20469 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20472 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20475 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20478 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20481 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20484 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20487 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20490 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20493 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20496 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20499 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20502 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20505 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20508 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20511 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20514 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20517 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20520 generate_exception_end(ctx
, EXCP_RI
);
20529 switch (extract32(ctx
->opcode
, 3, 3)) {
20530 case NM_CMP_CONDN_S
:
20531 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20533 case NM_CMP_CONDN_D
:
20534 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20537 generate_exception_end(ctx
, EXCP_RI
);
20542 generate_exception_end(ctx
, EXCP_RI
);
20547 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20548 int rd
, int rs
, int rt
)
20551 TCGv t0
= tcg_temp_new();
20552 TCGv v1_t
= tcg_temp_new();
20553 TCGv v2_t
= tcg_temp_new();
20555 gen_load_gpr(v1_t
, rs
);
20556 gen_load_gpr(v2_t
, rt
);
20561 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20565 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20569 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20571 case NM_CMPU_EQ_QB
:
20573 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20575 case NM_CMPU_LT_QB
:
20577 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20579 case NM_CMPU_LE_QB
:
20581 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20583 case NM_CMPGU_EQ_QB
:
20585 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20586 gen_store_gpr(v1_t
, ret
);
20588 case NM_CMPGU_LT_QB
:
20590 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20591 gen_store_gpr(v1_t
, ret
);
20593 case NM_CMPGU_LE_QB
:
20595 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20596 gen_store_gpr(v1_t
, ret
);
20598 case NM_CMPGDU_EQ_QB
:
20600 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20601 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20602 gen_store_gpr(v1_t
, ret
);
20604 case NM_CMPGDU_LT_QB
:
20606 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20607 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20608 gen_store_gpr(v1_t
, ret
);
20610 case NM_CMPGDU_LE_QB
:
20612 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20613 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20614 gen_store_gpr(v1_t
, ret
);
20618 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20619 gen_store_gpr(v1_t
, ret
);
20623 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20624 gen_store_gpr(v1_t
, ret
);
20628 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20629 gen_store_gpr(v1_t
, ret
);
20633 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20634 gen_store_gpr(v1_t
, ret
);
20638 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20639 gen_store_gpr(v1_t
, ret
);
20643 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20644 gen_store_gpr(v1_t
, ret
);
20648 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20649 gen_store_gpr(v1_t
, ret
);
20653 switch (extract32(ctx
->opcode
, 10, 1)) {
20656 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20657 gen_store_gpr(v1_t
, ret
);
20661 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20662 gen_store_gpr(v1_t
, ret
);
20666 case NM_ADDQH_R_PH
:
20668 switch (extract32(ctx
->opcode
, 10, 1)) {
20671 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20672 gen_store_gpr(v1_t
, ret
);
20676 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20677 gen_store_gpr(v1_t
, ret
);
20683 switch (extract32(ctx
->opcode
, 10, 1)) {
20686 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20687 gen_store_gpr(v1_t
, ret
);
20691 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20692 gen_store_gpr(v1_t
, ret
);
20698 switch (extract32(ctx
->opcode
, 10, 1)) {
20701 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20702 gen_store_gpr(v1_t
, ret
);
20706 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20707 gen_store_gpr(v1_t
, ret
);
20713 switch (extract32(ctx
->opcode
, 10, 1)) {
20716 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20717 gen_store_gpr(v1_t
, ret
);
20721 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20722 gen_store_gpr(v1_t
, ret
);
20726 case NM_ADDUH_R_QB
:
20728 switch (extract32(ctx
->opcode
, 10, 1)) {
20731 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20732 gen_store_gpr(v1_t
, ret
);
20736 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20737 gen_store_gpr(v1_t
, ret
);
20741 case NM_SHRAV_R_PH
:
20743 switch (extract32(ctx
->opcode
, 10, 1)) {
20746 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20747 gen_store_gpr(v1_t
, ret
);
20751 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20752 gen_store_gpr(v1_t
, ret
);
20756 case NM_SHRAV_R_QB
:
20758 switch (extract32(ctx
->opcode
, 10, 1)) {
20761 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20762 gen_store_gpr(v1_t
, ret
);
20766 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20767 gen_store_gpr(v1_t
, ret
);
20773 switch (extract32(ctx
->opcode
, 10, 1)) {
20776 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20777 gen_store_gpr(v1_t
, ret
);
20781 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20782 gen_store_gpr(v1_t
, ret
);
20786 case NM_SUBQH_R_PH
:
20788 switch (extract32(ctx
->opcode
, 10, 1)) {
20791 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20792 gen_store_gpr(v1_t
, ret
);
20796 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20797 gen_store_gpr(v1_t
, ret
);
20803 switch (extract32(ctx
->opcode
, 10, 1)) {
20806 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20807 gen_store_gpr(v1_t
, ret
);
20811 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20812 gen_store_gpr(v1_t
, ret
);
20818 switch (extract32(ctx
->opcode
, 10, 1)) {
20821 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20822 gen_store_gpr(v1_t
, ret
);
20826 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20827 gen_store_gpr(v1_t
, ret
);
20833 switch (extract32(ctx
->opcode
, 10, 1)) {
20836 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20837 gen_store_gpr(v1_t
, ret
);
20841 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20842 gen_store_gpr(v1_t
, ret
);
20846 case NM_SUBUH_R_QB
:
20848 switch (extract32(ctx
->opcode
, 10, 1)) {
20851 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20852 gen_store_gpr(v1_t
, ret
);
20856 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20857 gen_store_gpr(v1_t
, ret
);
20861 case NM_SHLLV_S_PH
:
20863 switch (extract32(ctx
->opcode
, 10, 1)) {
20866 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20867 gen_store_gpr(v1_t
, ret
);
20871 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20872 gen_store_gpr(v1_t
, ret
);
20876 case NM_PRECR_SRA_R_PH_W
:
20878 switch (extract32(ctx
->opcode
, 10, 1)) {
20880 /* PRECR_SRA_PH_W */
20882 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20883 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20885 gen_store_gpr(v1_t
, rt
);
20886 tcg_temp_free_i32(sa_t
);
20890 /* PRECR_SRA_R_PH_W */
20892 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20893 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20895 gen_store_gpr(v1_t
, rt
);
20896 tcg_temp_free_i32(sa_t
);
20901 case NM_MULEU_S_PH_QBL
:
20903 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20904 gen_store_gpr(v1_t
, ret
);
20906 case NM_MULEU_S_PH_QBR
:
20908 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20909 gen_store_gpr(v1_t
, ret
);
20911 case NM_MULQ_RS_PH
:
20913 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20914 gen_store_gpr(v1_t
, ret
);
20918 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20919 gen_store_gpr(v1_t
, ret
);
20923 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20924 gen_store_gpr(v1_t
, ret
);
20928 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20929 gen_store_gpr(v1_t
, ret
);
20933 gen_load_gpr(t0
, rs
);
20935 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20937 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20941 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20942 gen_store_gpr(v1_t
, ret
);
20946 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20947 gen_store_gpr(v1_t
, ret
);
20951 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20952 gen_store_gpr(v1_t
, ret
);
20956 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20957 gen_store_gpr(v1_t
, ret
);
20961 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20962 gen_store_gpr(v1_t
, ret
);
20966 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20967 gen_store_gpr(v1_t
, ret
);
20972 TCGv tv0
= tcg_temp_new();
20973 TCGv tv1
= tcg_temp_new();
20974 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
20976 tcg_gen_movi_tl(tv0
, rd
>> 3);
20977 tcg_gen_movi_tl(tv1
, imm
);
20978 gen_helper_shilo(tv0
, tv1
, cpu_env
);
20981 case NM_MULEQ_S_W_PHL
:
20983 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
20984 gen_store_gpr(v1_t
, ret
);
20986 case NM_MULEQ_S_W_PHR
:
20988 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
20989 gen_store_gpr(v1_t
, ret
);
20993 switch (extract32(ctx
->opcode
, 10, 1)) {
20996 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20997 gen_store_gpr(v1_t
, ret
);
21001 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21002 gen_store_gpr(v1_t
, ret
);
21006 case NM_PRECR_QB_PH
:
21008 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
21009 gen_store_gpr(v1_t
, ret
);
21011 case NM_PRECRQ_QB_PH
:
21013 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
21014 gen_store_gpr(v1_t
, ret
);
21016 case NM_PRECRQ_PH_W
:
21018 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
21019 gen_store_gpr(v1_t
, ret
);
21021 case NM_PRECRQ_RS_PH_W
:
21023 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
21024 gen_store_gpr(v1_t
, ret
);
21026 case NM_PRECRQU_S_QB_PH
:
21028 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21029 gen_store_gpr(v1_t
, ret
);
21033 tcg_gen_movi_tl(t0
, rd
);
21034 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
21035 gen_store_gpr(v1_t
, rt
);
21039 tcg_gen_movi_tl(t0
, rd
>> 1);
21040 switch (extract32(ctx
->opcode
, 10, 1)) {
21043 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
21044 gen_store_gpr(v1_t
, rt
);
21048 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
21049 gen_store_gpr(v1_t
, rt
);
21055 tcg_gen_movi_tl(t0
, rd
>> 1);
21056 switch (extract32(ctx
->opcode
, 10, 2)) {
21059 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
21060 gen_store_gpr(v1_t
, rt
);
21064 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
21065 gen_store_gpr(v1_t
, rt
);
21068 generate_exception_end(ctx
, EXCP_RI
);
21074 tcg_gen_movi_tl(t0
, rd
);
21075 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
21076 gen_store_gpr(v1_t
, rt
);
21082 imm
= sextract32(ctx
->opcode
, 11, 11);
21083 imm
= (int16_t)(imm
<< 6) >> 6;
21085 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
21090 generate_exception_end(ctx
, EXCP_RI
);
21095 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21103 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 2);
21104 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
21106 rt
= extract32(ctx
->opcode
, 21, 5);
21107 rs
= extract32(ctx
->opcode
, 16, 5);
21108 rd
= extract32(ctx
->opcode
, 11, 5);
21110 op
= extract32(ctx
->opcode
, 26, 6);
21115 switch (extract32(ctx
->opcode
, 19, 2)) {
21118 generate_exception_end(ctx
, EXCP_RI
);
21121 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
21122 generate_exception_end(ctx
, EXCP_SYSCALL
);
21124 generate_exception_end(ctx
, EXCP_RI
);
21128 generate_exception_end(ctx
, EXCP_BREAK
);
21131 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
21132 gen_helper_do_semihosting(cpu_env
);
21134 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21135 generate_exception_end(ctx
, EXCP_RI
);
21137 generate_exception_end(ctx
, EXCP_DBp
);
21144 imm
= extract32(ctx
->opcode
, 0, 16);
21146 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
21148 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21150 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21155 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21156 extract32(ctx
->opcode
, 1, 20) << 1;
21157 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21158 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21162 switch (ctx
->opcode
& 0x07) {
21164 gen_pool32a0_nanomips_insn(env
, ctx
);
21168 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
21169 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
21173 switch (extract32(ctx
->opcode
, 3, 3)) {
21175 gen_p_lsx(ctx
, rd
, rs
, rt
);
21179 * In nanoMIPS, the shift field directly encodes the shift
21180 * amount, meaning that the supported shift values are in
21181 * the range 0 to 3 (instead of 1 to 4 in MIPSR6).
21183 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
21184 extract32(ctx
->opcode
, 9, 2) - 1);
21187 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
21190 gen_pool32axf_nanomips_insn(env
, ctx
);
21193 generate_exception_end(ctx
, EXCP_RI
);
21198 generate_exception_end(ctx
, EXCP_RI
);
21203 switch (ctx
->opcode
& 0x03) {
21206 offset
= extract32(ctx
->opcode
, 0, 21);
21207 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
21211 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21214 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21217 generate_exception_end(ctx
, EXCP_RI
);
21223 insn
= cpu_lduw_code(env
, ctx
->base
.pc_next
+ 4);
21224 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
21225 switch (extract32(ctx
->opcode
, 16, 5)) {
21229 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
21235 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
21236 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21242 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
21248 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21251 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21258 t0
= tcg_temp_new();
21260 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21263 tcg_gen_movi_tl(t0
, addr
);
21264 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21272 t0
= tcg_temp_new();
21273 t1
= tcg_temp_new();
21275 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21278 tcg_gen_movi_tl(t0
, addr
);
21279 gen_load_gpr(t1
, rt
);
21281 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21288 generate_exception_end(ctx
, EXCP_RI
);
21294 switch (extract32(ctx
->opcode
, 12, 4)) {
21296 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21299 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21302 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21305 switch (extract32(ctx
->opcode
, 20, 1)) {
21307 switch (ctx
->opcode
& 3) {
21309 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21310 extract32(ctx
->opcode
, 2, 1),
21311 extract32(ctx
->opcode
, 3, 9) << 3);
21314 case NM_RESTORE_JRC
:
21315 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21316 extract32(ctx
->opcode
, 2, 1),
21317 extract32(ctx
->opcode
, 3, 9) << 3);
21318 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21319 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21323 generate_exception_end(ctx
, EXCP_RI
);
21328 generate_exception_end(ctx
, EXCP_RI
);
21333 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21336 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21340 TCGv t0
= tcg_temp_new();
21342 imm
= extract32(ctx
->opcode
, 0, 12);
21343 gen_load_gpr(t0
, rs
);
21344 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21345 gen_store_gpr(t0
, rt
);
21351 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21352 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21356 int shift
= extract32(ctx
->opcode
, 0, 5);
21357 switch (extract32(ctx
->opcode
, 5, 4)) {
21359 if (rt
== 0 && shift
== 0) {
21361 } else if (rt
== 0 && shift
== 3) {
21362 /* EHB - treat as NOP */
21363 } else if (rt
== 0 && shift
== 5) {
21364 /* PAUSE - treat as NOP */
21365 } else if (rt
== 0 && shift
== 6) {
21367 gen_sync(extract32(ctx
->opcode
, 16, 5));
21370 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21371 extract32(ctx
->opcode
, 0, 5));
21375 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21376 extract32(ctx
->opcode
, 0, 5));
21379 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21380 extract32(ctx
->opcode
, 0, 5));
21383 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21384 extract32(ctx
->opcode
, 0, 5));
21392 TCGv t0
= tcg_temp_new();
21393 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21394 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21396 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21398 gen_load_gpr(t0
, rs
);
21399 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21402 tcg_temp_free_i32(shift
);
21403 tcg_temp_free_i32(shiftx
);
21404 tcg_temp_free_i32(stripe
);
21408 switch (((ctx
->opcode
>> 10) & 2) |
21409 (extract32(ctx
->opcode
, 5, 1))) {
21412 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21413 extract32(ctx
->opcode
, 6, 5));
21416 generate_exception_end(ctx
, EXCP_RI
);
21421 switch (((ctx
->opcode
>> 10) & 2) |
21422 (extract32(ctx
->opcode
, 5, 1))) {
21425 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21426 extract32(ctx
->opcode
, 6, 5));
21429 generate_exception_end(ctx
, EXCP_RI
);
21434 generate_exception_end(ctx
, EXCP_RI
);
21439 gen_pool32f_nanomips_insn(ctx
);
21444 switch (extract32(ctx
->opcode
, 1, 1)) {
21447 tcg_gen_movi_tl(cpu_gpr
[rt
],
21448 sextract32(ctx
->opcode
, 0, 1) << 31 |
21449 extract32(ctx
->opcode
, 2, 10) << 21 |
21450 extract32(ctx
->opcode
, 12, 9) << 12);
21455 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21456 extract32(ctx
->opcode
, 2, 10) << 21 |
21457 extract32(ctx
->opcode
, 12, 9) << 12;
21459 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21460 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21467 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21469 switch (extract32(ctx
->opcode
, 18, 3)) {
21471 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21474 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21477 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21481 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21486 switch (ctx
->opcode
& 1) {
21488 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21491 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21497 switch (ctx
->opcode
& 1) {
21499 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21502 generate_exception_end(ctx
, EXCP_RI
);
21508 switch (ctx
->opcode
& 0x3) {
21510 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21513 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21516 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21519 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21524 generate_exception_end(ctx
, EXCP_RI
);
21531 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21533 switch (extract32(ctx
->opcode
, 12, 4)) {
21538 * Break the TB to be able to sync copied instructions
21541 ctx
->base
.is_jmp
= DISAS_STOP
;
21544 /* Treat as NOP. */
21548 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21551 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21554 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21557 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21560 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21563 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21566 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21569 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21572 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21575 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21578 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21581 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21584 generate_exception_end(ctx
, EXCP_RI
);
21591 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21592 extract32(ctx
->opcode
, 0, 8);
21594 switch (extract32(ctx
->opcode
, 8, 3)) {
21596 switch (extract32(ctx
->opcode
, 11, 4)) {
21598 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21601 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21604 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21607 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21610 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21613 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21616 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21619 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21622 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21625 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21628 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21631 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21637 * Break the TB to be able to sync copied instructions
21640 ctx
->base
.is_jmp
= DISAS_STOP
;
21643 /* Treat as NOP. */
21647 generate_exception_end(ctx
, EXCP_RI
);
21652 switch (extract32(ctx
->opcode
, 11, 4)) {
21657 TCGv t0
= tcg_temp_new();
21658 TCGv t1
= tcg_temp_new();
21660 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21662 switch (extract32(ctx
->opcode
, 11, 4)) {
21664 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21666 gen_store_gpr(t0
, rt
);
21669 gen_load_gpr(t1
, rt
);
21670 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21679 switch (ctx
->opcode
& 0x03) {
21681 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21685 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21690 switch (ctx
->opcode
& 0x03) {
21692 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, false);
21696 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21702 check_cp0_enabled(ctx
);
21703 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21704 gen_cache_operation(ctx
, rt
, rs
, s
);
21710 switch (extract32(ctx
->opcode
, 11, 4)) {
21713 check_cp0_enabled(ctx
);
21714 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21718 check_cp0_enabled(ctx
);
21719 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21723 check_cp0_enabled(ctx
);
21724 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21728 /* case NM_SYNCIE */
21730 check_cp0_enabled(ctx
);
21732 * Break the TB to be able to sync copied instructions
21735 ctx
->base
.is_jmp
= DISAS_STOP
;
21737 /* case NM_PREFE */
21739 check_cp0_enabled(ctx
);
21740 /* Treat as NOP. */
21745 check_cp0_enabled(ctx
);
21746 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21750 check_cp0_enabled(ctx
);
21751 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21755 check_cp0_enabled(ctx
);
21756 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21759 check_nms_dl_il_sl_tl_l2c(ctx
);
21760 gen_cache_operation(ctx
, rt
, rs
, s
);
21764 check_cp0_enabled(ctx
);
21765 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21769 check_cp0_enabled(ctx
);
21770 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21773 switch (extract32(ctx
->opcode
, 2, 2)) {
21777 check_cp0_enabled(ctx
);
21778 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21783 check_cp0_enabled(ctx
);
21784 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21787 generate_exception_end(ctx
, EXCP_RI
);
21792 switch (extract32(ctx
->opcode
, 2, 2)) {
21796 check_cp0_enabled(ctx
);
21797 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, true);
21802 check_cp0_enabled(ctx
);
21803 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21807 generate_exception_end(ctx
, EXCP_RI
);
21817 int count
= extract32(ctx
->opcode
, 12, 3);
21820 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21821 extract32(ctx
->opcode
, 0, 8);
21822 TCGv va
= tcg_temp_new();
21823 TCGv t1
= tcg_temp_new();
21824 TCGMemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21825 NM_P_LS_UAWM
? MO_UNALN
: 0;
21827 count
= (count
== 0) ? 8 : count
;
21828 while (counter
!= count
) {
21829 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21830 int this_offset
= offset
+ (counter
<< 2);
21832 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21834 switch (extract32(ctx
->opcode
, 11, 1)) {
21836 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21838 gen_store_gpr(t1
, this_rt
);
21839 if ((this_rt
== rs
) &&
21840 (counter
!= (count
- 1))) {
21841 /* UNPREDICTABLE */
21845 this_rt
= (rt
== 0) ? 0 : this_rt
;
21846 gen_load_gpr(t1
, this_rt
);
21847 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21858 generate_exception_end(ctx
, EXCP_RI
);
21866 TCGv t0
= tcg_temp_new();
21867 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21868 extract32(ctx
->opcode
, 1, 20) << 1;
21869 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21870 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21871 extract32(ctx
->opcode
, 21, 3));
21872 gen_load_gpr(t0
, rt
);
21873 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21874 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21880 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21881 extract32(ctx
->opcode
, 1, 24) << 1;
21883 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21885 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21888 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21893 switch (extract32(ctx
->opcode
, 12, 4)) {
21896 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21899 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21902 generate_exception_end(ctx
, EXCP_RI
);
21908 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21909 extract32(ctx
->opcode
, 1, 13) << 1;
21910 switch (extract32(ctx
->opcode
, 14, 2)) {
21913 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21916 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21917 extract32(ctx
->opcode
, 1, 13) << 1;
21918 check_cp1_enabled(ctx
);
21919 switch (extract32(ctx
->opcode
, 16, 5)) {
21921 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21924 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21929 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21930 extract32(ctx
->opcode
, 0, 1) << 13;
21932 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21937 generate_exception_end(ctx
, EXCP_RI
);
21943 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21945 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21949 if (rs
== rt
|| rt
== 0) {
21950 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21951 } else if (rs
== 0) {
21952 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21954 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21962 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21963 extract32(ctx
->opcode
, 1, 13) << 1;
21964 switch (extract32(ctx
->opcode
, 14, 2)) {
21967 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21970 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21972 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21974 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
21978 if (rs
== 0 || rs
== rt
) {
21980 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
21982 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
21986 generate_exception_end(ctx
, EXCP_RI
);
21993 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
21994 extract32(ctx
->opcode
, 1, 10) << 1;
21995 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
21997 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
22002 generate_exception_end(ctx
, EXCP_RI
);
22008 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
22011 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22012 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22013 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD3(ctx
->opcode
));
22017 /* make sure instructions are on a halfword boundary */
22018 if (ctx
->base
.pc_next
& 0x1) {
22019 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
22020 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
22021 tcg_temp_free(tmp
);
22022 generate_exception_end(ctx
, EXCP_AdEL
);
22026 op
= extract32(ctx
->opcode
, 10, 6);
22029 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22032 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
22033 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
22036 switch (extract32(ctx
->opcode
, 3, 2)) {
22037 case NM_P16_SYSCALL
:
22038 if (extract32(ctx
->opcode
, 2, 1) == 0) {
22039 generate_exception_end(ctx
, EXCP_SYSCALL
);
22041 generate_exception_end(ctx
, EXCP_RI
);
22045 generate_exception_end(ctx
, EXCP_BREAK
);
22048 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
22049 gen_helper_do_semihosting(cpu_env
);
22051 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
22052 generate_exception_end(ctx
, EXCP_RI
);
22054 generate_exception_end(ctx
, EXCP_DBp
);
22059 generate_exception_end(ctx
, EXCP_RI
);
22066 int shift
= extract32(ctx
->opcode
, 0, 3);
22068 shift
= (shift
== 0) ? 8 : shift
;
22070 switch (extract32(ctx
->opcode
, 3, 1)) {
22078 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
22082 switch (ctx
->opcode
& 1) {
22084 gen_pool16c_nanomips_insn(ctx
);
22087 gen_ldxs(ctx
, rt
, rs
, rd
);
22092 switch (extract32(ctx
->opcode
, 6, 1)) {
22094 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
22095 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
22098 generate_exception_end(ctx
, EXCP_RI
);
22103 switch (extract32(ctx
->opcode
, 3, 1)) {
22105 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
22106 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
22108 case NM_P_ADDIURS5
:
22109 rt
= extract32(ctx
->opcode
, 5, 5);
22111 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
22112 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
22113 (extract32(ctx
->opcode
, 0, 3));
22114 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
22120 switch (ctx
->opcode
& 0x1) {
22122 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
22125 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
22130 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22131 extract32(ctx
->opcode
, 5, 3);
22132 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22133 extract32(ctx
->opcode
, 0, 3);
22134 rt
= decode_gpr_gpr4(rt
);
22135 rs
= decode_gpr_gpr4(rs
);
22136 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
22137 (extract32(ctx
->opcode
, 3, 1))) {
22140 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
22144 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
22147 generate_exception_end(ctx
, EXCP_RI
);
22153 int imm
= extract32(ctx
->opcode
, 0, 7);
22154 imm
= (imm
== 0x7f ? -1 : imm
);
22156 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
22162 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
22163 u
= (u
== 12) ? 0xff :
22164 (u
== 13) ? 0xffff : u
;
22165 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
22169 offset
= extract32(ctx
->opcode
, 0, 2);
22170 switch (extract32(ctx
->opcode
, 2, 2)) {
22172 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
22175 rt
= decode_gpr_gpr3_src_store(
22176 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22177 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
22180 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
22183 generate_exception_end(ctx
, EXCP_RI
);
22188 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
22189 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
22191 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
22194 rt
= decode_gpr_gpr3_src_store(
22195 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22196 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
22199 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
22202 generate_exception_end(ctx
, EXCP_RI
);
22207 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22208 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22211 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22212 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22213 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
22217 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22218 extract32(ctx
->opcode
, 5, 3);
22219 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22220 extract32(ctx
->opcode
, 0, 3);
22221 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22222 (extract32(ctx
->opcode
, 8, 1) << 2);
22223 rt
= decode_gpr_gpr4(rt
);
22224 rs
= decode_gpr_gpr4(rs
);
22225 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22229 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22230 extract32(ctx
->opcode
, 5, 3);
22231 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22232 extract32(ctx
->opcode
, 0, 3);
22233 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22234 (extract32(ctx
->opcode
, 8, 1) << 2);
22235 rt
= decode_gpr_gpr4_zero(rt
);
22236 rs
= decode_gpr_gpr4(rs
);
22237 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22240 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22241 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
22244 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22245 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22246 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
22249 rt
= decode_gpr_gpr3_src_store(
22250 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22251 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22252 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22253 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22256 rt
= decode_gpr_gpr3_src_store(
22257 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22258 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22259 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22262 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22263 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22264 (extract32(ctx
->opcode
, 1, 9) << 1));
22267 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22268 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22269 (extract32(ctx
->opcode
, 1, 9) << 1));
22272 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22273 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22274 (extract32(ctx
->opcode
, 1, 6) << 1));
22277 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22278 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22279 (extract32(ctx
->opcode
, 1, 6) << 1));
22282 switch (ctx
->opcode
& 0xf) {
22285 switch (extract32(ctx
->opcode
, 4, 1)) {
22287 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22288 extract32(ctx
->opcode
, 5, 5), 0, 0);
22291 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22292 extract32(ctx
->opcode
, 5, 5), 31, 0);
22299 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22300 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22301 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22302 extract32(ctx
->opcode
, 0, 4) << 1);
22309 int count
= extract32(ctx
->opcode
, 0, 4);
22310 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22312 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22313 switch (extract32(ctx
->opcode
, 8, 1)) {
22315 gen_save(ctx
, rt
, count
, 0, u
);
22317 case NM_RESTORE_JRC16
:
22318 gen_restore(ctx
, rt
, count
, 0, u
);
22319 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22328 static const int gpr2reg1
[] = {4, 5, 6, 7};
22329 static const int gpr2reg2
[] = {5, 6, 7, 8};
22331 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22332 extract32(ctx
->opcode
, 8, 1);
22333 int r1
= gpr2reg1
[rd2
];
22334 int r2
= gpr2reg2
[rd2
];
22335 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22336 extract32(ctx
->opcode
, 0, 3);
22337 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22338 extract32(ctx
->opcode
, 5, 3);
22339 TCGv t0
= tcg_temp_new();
22340 TCGv t1
= tcg_temp_new();
22341 if (op
== NM_MOVEP
) {
22344 rs
= decode_gpr_gpr4_zero(r3
);
22345 rt
= decode_gpr_gpr4_zero(r4
);
22347 rd
= decode_gpr_gpr4(r3
);
22348 re
= decode_gpr_gpr4(r4
);
22352 gen_load_gpr(t0
, rs
);
22353 gen_load_gpr(t1
, rt
);
22354 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22355 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22361 return decode_nanomips_32_48_opc(env
, ctx
);
22368 /* SmartMIPS extension to MIPS32 */
22370 #if defined(TARGET_MIPS64)
22372 /* MDMX extension to MIPS64 */
22376 /* MIPSDSP functions. */
22377 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
22378 int rd
, int base
, int offset
)
22383 t0
= tcg_temp_new();
22386 gen_load_gpr(t0
, offset
);
22387 } else if (offset
== 0) {
22388 gen_load_gpr(t0
, base
);
22390 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
22395 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
22396 gen_store_gpr(t0
, rd
);
22399 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
22400 gen_store_gpr(t0
, rd
);
22403 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
22404 gen_store_gpr(t0
, rd
);
22406 #if defined(TARGET_MIPS64)
22408 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
22409 gen_store_gpr(t0
, rd
);
22416 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22417 int ret
, int v1
, int v2
)
22423 /* Treat as NOP. */
22427 v1_t
= tcg_temp_new();
22428 v2_t
= tcg_temp_new();
22430 gen_load_gpr(v1_t
, v1
);
22431 gen_load_gpr(v2_t
, v2
);
22434 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22435 case OPC_MULT_G_2E
:
22439 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22441 case OPC_ADDUH_R_QB
:
22442 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22445 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22447 case OPC_ADDQH_R_PH
:
22448 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22451 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22453 case OPC_ADDQH_R_W
:
22454 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22457 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22459 case OPC_SUBUH_R_QB
:
22460 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22463 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22465 case OPC_SUBQH_R_PH
:
22466 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22469 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22471 case OPC_SUBQH_R_W
:
22472 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22476 case OPC_ABSQ_S_PH_DSP
:
22478 case OPC_ABSQ_S_QB
:
22480 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22482 case OPC_ABSQ_S_PH
:
22484 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22488 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22490 case OPC_PRECEQ_W_PHL
:
22492 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22493 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22495 case OPC_PRECEQ_W_PHR
:
22497 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22498 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22499 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22501 case OPC_PRECEQU_PH_QBL
:
22503 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22505 case OPC_PRECEQU_PH_QBR
:
22507 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22509 case OPC_PRECEQU_PH_QBLA
:
22511 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22513 case OPC_PRECEQU_PH_QBRA
:
22515 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22517 case OPC_PRECEU_PH_QBL
:
22519 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22521 case OPC_PRECEU_PH_QBR
:
22523 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22525 case OPC_PRECEU_PH_QBLA
:
22527 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22529 case OPC_PRECEU_PH_QBRA
:
22531 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22535 case OPC_ADDU_QB_DSP
:
22539 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22541 case OPC_ADDQ_S_PH
:
22543 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22547 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22551 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22553 case OPC_ADDU_S_QB
:
22555 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22559 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22561 case OPC_ADDU_S_PH
:
22563 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22567 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22569 case OPC_SUBQ_S_PH
:
22571 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22575 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22579 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22581 case OPC_SUBU_S_QB
:
22583 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22587 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22589 case OPC_SUBU_S_PH
:
22591 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22595 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22599 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22603 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22605 case OPC_RADDU_W_QB
:
22607 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22611 case OPC_CMPU_EQ_QB_DSP
:
22613 case OPC_PRECR_QB_PH
:
22615 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22617 case OPC_PRECRQ_QB_PH
:
22619 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22621 case OPC_PRECR_SRA_PH_W
:
22624 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22625 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22627 tcg_temp_free_i32(sa_t
);
22630 case OPC_PRECR_SRA_R_PH_W
:
22633 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22634 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22636 tcg_temp_free_i32(sa_t
);
22639 case OPC_PRECRQ_PH_W
:
22641 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22643 case OPC_PRECRQ_RS_PH_W
:
22645 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22647 case OPC_PRECRQU_S_QB_PH
:
22649 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22653 #ifdef TARGET_MIPS64
22654 case OPC_ABSQ_S_QH_DSP
:
22656 case OPC_PRECEQ_L_PWL
:
22658 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22660 case OPC_PRECEQ_L_PWR
:
22662 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22664 case OPC_PRECEQ_PW_QHL
:
22666 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22668 case OPC_PRECEQ_PW_QHR
:
22670 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22672 case OPC_PRECEQ_PW_QHLA
:
22674 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22676 case OPC_PRECEQ_PW_QHRA
:
22678 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22680 case OPC_PRECEQU_QH_OBL
:
22682 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22684 case OPC_PRECEQU_QH_OBR
:
22686 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22688 case OPC_PRECEQU_QH_OBLA
:
22690 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22692 case OPC_PRECEQU_QH_OBRA
:
22694 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22696 case OPC_PRECEU_QH_OBL
:
22698 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22700 case OPC_PRECEU_QH_OBR
:
22702 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22704 case OPC_PRECEU_QH_OBLA
:
22706 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22708 case OPC_PRECEU_QH_OBRA
:
22710 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22712 case OPC_ABSQ_S_OB
:
22714 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22716 case OPC_ABSQ_S_PW
:
22718 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22720 case OPC_ABSQ_S_QH
:
22722 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22726 case OPC_ADDU_OB_DSP
:
22728 case OPC_RADDU_L_OB
:
22730 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22734 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22736 case OPC_SUBQ_S_PW
:
22738 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22742 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22744 case OPC_SUBQ_S_QH
:
22746 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22750 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22752 case OPC_SUBU_S_OB
:
22754 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22758 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22760 case OPC_SUBU_S_QH
:
22762 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22766 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22768 case OPC_SUBUH_R_OB
:
22770 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22774 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22776 case OPC_ADDQ_S_PW
:
22778 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22782 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22784 case OPC_ADDQ_S_QH
:
22786 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22790 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22792 case OPC_ADDU_S_OB
:
22794 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22798 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22800 case OPC_ADDU_S_QH
:
22802 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22806 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22808 case OPC_ADDUH_R_OB
:
22810 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22814 case OPC_CMPU_EQ_OB_DSP
:
22816 case OPC_PRECR_OB_QH
:
22818 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22820 case OPC_PRECR_SRA_QH_PW
:
22823 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22824 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22825 tcg_temp_free_i32(ret_t
);
22828 case OPC_PRECR_SRA_R_QH_PW
:
22831 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22832 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22833 tcg_temp_free_i32(sa_v
);
22836 case OPC_PRECRQ_OB_QH
:
22838 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22840 case OPC_PRECRQ_PW_L
:
22842 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22844 case OPC_PRECRQ_QH_PW
:
22846 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22848 case OPC_PRECRQ_RS_QH_PW
:
22850 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22852 case OPC_PRECRQU_S_OB_QH
:
22854 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22861 tcg_temp_free(v1_t
);
22862 tcg_temp_free(v2_t
);
22865 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22866 int ret
, int v1
, int v2
)
22874 /* Treat as NOP. */
22878 t0
= tcg_temp_new();
22879 v1_t
= tcg_temp_new();
22880 v2_t
= tcg_temp_new();
22882 tcg_gen_movi_tl(t0
, v1
);
22883 gen_load_gpr(v1_t
, v1
);
22884 gen_load_gpr(v2_t
, v2
);
22887 case OPC_SHLL_QB_DSP
:
22889 op2
= MASK_SHLL_QB(ctx
->opcode
);
22893 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22897 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22901 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22905 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22907 case OPC_SHLL_S_PH
:
22909 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22911 case OPC_SHLLV_S_PH
:
22913 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22917 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22919 case OPC_SHLLV_S_W
:
22921 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22925 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22929 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22933 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22937 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22941 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22943 case OPC_SHRA_R_QB
:
22945 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22949 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22951 case OPC_SHRAV_R_QB
:
22953 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22957 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22959 case OPC_SHRA_R_PH
:
22961 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22965 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22967 case OPC_SHRAV_R_PH
:
22969 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22973 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
22975 case OPC_SHRAV_R_W
:
22977 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22979 default: /* Invalid */
22980 MIPS_INVAL("MASK SHLL.QB");
22981 generate_exception_end(ctx
, EXCP_RI
);
22986 #ifdef TARGET_MIPS64
22987 case OPC_SHLL_OB_DSP
:
22988 op2
= MASK_SHLL_OB(ctx
->opcode
);
22992 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
22996 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
22998 case OPC_SHLL_S_PW
:
23000 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23002 case OPC_SHLLV_S_PW
:
23004 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23008 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23012 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23016 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23020 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23022 case OPC_SHLL_S_QH
:
23024 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23026 case OPC_SHLLV_S_QH
:
23028 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23032 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
23036 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23038 case OPC_SHRA_R_OB
:
23040 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
23042 case OPC_SHRAV_R_OB
:
23044 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23048 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
23052 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
23054 case OPC_SHRA_R_PW
:
23056 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
23058 case OPC_SHRAV_R_PW
:
23060 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
23064 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
23068 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23070 case OPC_SHRA_R_QH
:
23072 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
23074 case OPC_SHRAV_R_QH
:
23076 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23080 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
23084 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23088 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
23092 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23094 default: /* Invalid */
23095 MIPS_INVAL("MASK SHLL.OB");
23096 generate_exception_end(ctx
, EXCP_RI
);
23104 tcg_temp_free(v1_t
);
23105 tcg_temp_free(v2_t
);
23108 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23109 int ret
, int v1
, int v2
, int check_ret
)
23115 if ((ret
== 0) && (check_ret
== 1)) {
23116 /* Treat as NOP. */
23120 t0
= tcg_temp_new_i32();
23121 v1_t
= tcg_temp_new();
23122 v2_t
= tcg_temp_new();
23124 tcg_gen_movi_i32(t0
, ret
);
23125 gen_load_gpr(v1_t
, v1
);
23126 gen_load_gpr(v2_t
, v2
);
23130 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
23131 * the same mask and op1.
23133 case OPC_MULT_G_2E
:
23137 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23140 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23143 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23145 case OPC_MULQ_RS_W
:
23146 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23150 case OPC_DPA_W_PH_DSP
:
23152 case OPC_DPAU_H_QBL
:
23154 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23156 case OPC_DPAU_H_QBR
:
23158 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23160 case OPC_DPSU_H_QBL
:
23162 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23164 case OPC_DPSU_H_QBR
:
23166 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23170 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23172 case OPC_DPAX_W_PH
:
23174 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23176 case OPC_DPAQ_S_W_PH
:
23178 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23180 case OPC_DPAQX_S_W_PH
:
23182 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23184 case OPC_DPAQX_SA_W_PH
:
23186 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23190 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23192 case OPC_DPSX_W_PH
:
23194 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23196 case OPC_DPSQ_S_W_PH
:
23198 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23200 case OPC_DPSQX_S_W_PH
:
23202 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23204 case OPC_DPSQX_SA_W_PH
:
23206 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23208 case OPC_MULSAQ_S_W_PH
:
23210 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23212 case OPC_DPAQ_SA_L_W
:
23214 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23216 case OPC_DPSQ_SA_L_W
:
23218 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23220 case OPC_MAQ_S_W_PHL
:
23222 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23224 case OPC_MAQ_S_W_PHR
:
23226 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23228 case OPC_MAQ_SA_W_PHL
:
23230 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23232 case OPC_MAQ_SA_W_PHR
:
23234 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23236 case OPC_MULSA_W_PH
:
23238 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23242 #ifdef TARGET_MIPS64
23243 case OPC_DPAQ_W_QH_DSP
:
23245 int ac
= ret
& 0x03;
23246 tcg_gen_movi_i32(t0
, ac
);
23251 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
23255 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
23259 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23263 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23267 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23269 case OPC_DPAQ_S_W_QH
:
23271 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23273 case OPC_DPAQ_SA_L_PW
:
23275 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23277 case OPC_DPAU_H_OBL
:
23279 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23281 case OPC_DPAU_H_OBR
:
23283 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23287 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23289 case OPC_DPSQ_S_W_QH
:
23291 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23293 case OPC_DPSQ_SA_L_PW
:
23295 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23297 case OPC_DPSU_H_OBL
:
23299 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23301 case OPC_DPSU_H_OBR
:
23303 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23305 case OPC_MAQ_S_L_PWL
:
23307 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23309 case OPC_MAQ_S_L_PWR
:
23311 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23313 case OPC_MAQ_S_W_QHLL
:
23315 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23317 case OPC_MAQ_SA_W_QHLL
:
23319 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23321 case OPC_MAQ_S_W_QHLR
:
23323 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23325 case OPC_MAQ_SA_W_QHLR
:
23327 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23329 case OPC_MAQ_S_W_QHRL
:
23331 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23333 case OPC_MAQ_SA_W_QHRL
:
23335 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23337 case OPC_MAQ_S_W_QHRR
:
23339 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23341 case OPC_MAQ_SA_W_QHRR
:
23343 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23345 case OPC_MULSAQ_S_L_PW
:
23347 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23349 case OPC_MULSAQ_S_W_QH
:
23351 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23357 case OPC_ADDU_QB_DSP
:
23359 case OPC_MULEU_S_PH_QBL
:
23361 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23363 case OPC_MULEU_S_PH_QBR
:
23365 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23367 case OPC_MULQ_RS_PH
:
23369 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23371 case OPC_MULEQ_S_W_PHL
:
23373 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23375 case OPC_MULEQ_S_W_PHR
:
23377 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23379 case OPC_MULQ_S_PH
:
23381 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23385 #ifdef TARGET_MIPS64
23386 case OPC_ADDU_OB_DSP
:
23388 case OPC_MULEQ_S_PW_QHL
:
23390 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23392 case OPC_MULEQ_S_PW_QHR
:
23394 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23396 case OPC_MULEU_S_QH_OBL
:
23398 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23400 case OPC_MULEU_S_QH_OBR
:
23402 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23404 case OPC_MULQ_RS_QH
:
23406 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23413 tcg_temp_free_i32(t0
);
23414 tcg_temp_free(v1_t
);
23415 tcg_temp_free(v2_t
);
23418 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23426 /* Treat as NOP. */
23430 t0
= tcg_temp_new();
23431 val_t
= tcg_temp_new();
23432 gen_load_gpr(val_t
, val
);
23435 case OPC_ABSQ_S_PH_DSP
:
23439 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
23444 target_long result
;
23445 imm
= (ctx
->opcode
>> 16) & 0xFF;
23446 result
= (uint32_t)imm
<< 24 |
23447 (uint32_t)imm
<< 16 |
23448 (uint32_t)imm
<< 8 |
23450 result
= (int32_t)result
;
23451 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
23456 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23457 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23458 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23459 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23460 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23461 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23466 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23467 imm
= (int16_t)(imm
<< 6) >> 6;
23468 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23469 (target_long
)((int32_t)imm
<< 16 | \
23475 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23476 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23477 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23478 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23482 #ifdef TARGET_MIPS64
23483 case OPC_ABSQ_S_QH_DSP
:
23490 imm
= (ctx
->opcode
>> 16) & 0xFF;
23491 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23492 temp
= (temp
<< 16) | temp
;
23493 temp
= (temp
<< 32) | temp
;
23494 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23502 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23503 imm
= (int16_t)(imm
<< 6) >> 6;
23504 temp
= ((target_long
)imm
<< 32) \
23505 | ((target_long
)imm
& 0xFFFFFFFF);
23506 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23514 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23515 imm
= (int16_t)(imm
<< 6) >> 6;
23517 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23518 ((uint64_t)(uint16_t)imm
<< 32) |
23519 ((uint64_t)(uint16_t)imm
<< 16) |
23520 (uint64_t)(uint16_t)imm
;
23521 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23526 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23527 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23528 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23529 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23530 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23531 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23532 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23536 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23537 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23538 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23542 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23543 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23544 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23545 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23546 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23553 tcg_temp_free(val_t
);
23556 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23557 uint32_t op1
, uint32_t op2
,
23558 int ret
, int v1
, int v2
, int check_ret
)
23564 if ((ret
== 0) && (check_ret
== 1)) {
23565 /* Treat as NOP. */
23569 t1
= tcg_temp_new();
23570 v1_t
= tcg_temp_new();
23571 v2_t
= tcg_temp_new();
23573 gen_load_gpr(v1_t
, v1
);
23574 gen_load_gpr(v2_t
, v2
);
23577 case OPC_CMPU_EQ_QB_DSP
:
23579 case OPC_CMPU_EQ_QB
:
23581 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23583 case OPC_CMPU_LT_QB
:
23585 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23587 case OPC_CMPU_LE_QB
:
23589 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23591 case OPC_CMPGU_EQ_QB
:
23593 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23595 case OPC_CMPGU_LT_QB
:
23597 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23599 case OPC_CMPGU_LE_QB
:
23601 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23603 case OPC_CMPGDU_EQ_QB
:
23605 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23606 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23607 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23608 tcg_gen_shli_tl(t1
, t1
, 24);
23609 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23611 case OPC_CMPGDU_LT_QB
:
23613 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23614 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23615 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23616 tcg_gen_shli_tl(t1
, t1
, 24);
23617 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23619 case OPC_CMPGDU_LE_QB
:
23621 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23622 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23623 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23624 tcg_gen_shli_tl(t1
, t1
, 24);
23625 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23627 case OPC_CMP_EQ_PH
:
23629 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23631 case OPC_CMP_LT_PH
:
23633 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23635 case OPC_CMP_LE_PH
:
23637 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23641 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23645 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23647 case OPC_PACKRL_PH
:
23649 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23653 #ifdef TARGET_MIPS64
23654 case OPC_CMPU_EQ_OB_DSP
:
23656 case OPC_CMP_EQ_PW
:
23658 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23660 case OPC_CMP_LT_PW
:
23662 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23664 case OPC_CMP_LE_PW
:
23666 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23668 case OPC_CMP_EQ_QH
:
23670 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23672 case OPC_CMP_LT_QH
:
23674 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23676 case OPC_CMP_LE_QH
:
23678 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23680 case OPC_CMPGDU_EQ_OB
:
23682 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23684 case OPC_CMPGDU_LT_OB
:
23686 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23688 case OPC_CMPGDU_LE_OB
:
23690 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23692 case OPC_CMPGU_EQ_OB
:
23694 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23696 case OPC_CMPGU_LT_OB
:
23698 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23700 case OPC_CMPGU_LE_OB
:
23702 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23704 case OPC_CMPU_EQ_OB
:
23706 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23708 case OPC_CMPU_LT_OB
:
23710 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23712 case OPC_CMPU_LE_OB
:
23714 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23716 case OPC_PACKRL_PW
:
23718 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23722 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23726 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23730 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23738 tcg_temp_free(v1_t
);
23739 tcg_temp_free(v2_t
);
23742 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23743 uint32_t op1
, int rt
, int rs
, int sa
)
23750 /* Treat as NOP. */
23754 t0
= tcg_temp_new();
23755 gen_load_gpr(t0
, rs
);
23758 case OPC_APPEND_DSP
:
23759 switch (MASK_APPEND(ctx
->opcode
)) {
23762 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23764 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23768 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23769 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23770 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23771 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23773 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23777 if (sa
!= 0 && sa
!= 2) {
23778 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23779 tcg_gen_ext32u_tl(t0
, t0
);
23780 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23781 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23783 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23785 default: /* Invalid */
23786 MIPS_INVAL("MASK APPEND");
23787 generate_exception_end(ctx
, EXCP_RI
);
23791 #ifdef TARGET_MIPS64
23792 case OPC_DAPPEND_DSP
:
23793 switch (MASK_DAPPEND(ctx
->opcode
)) {
23796 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23800 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23801 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23802 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23806 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23807 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23808 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23813 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23814 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23815 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23816 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23819 default: /* Invalid */
23820 MIPS_INVAL("MASK DAPPEND");
23821 generate_exception_end(ctx
, EXCP_RI
);
23830 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23831 int ret
, int v1
, int v2
, int check_ret
)
23840 if ((ret
== 0) && (check_ret
== 1)) {
23841 /* Treat as NOP. */
23845 t0
= tcg_temp_new();
23846 t1
= tcg_temp_new();
23847 v1_t
= tcg_temp_new();
23848 v2_t
= tcg_temp_new();
23850 gen_load_gpr(v1_t
, v1
);
23851 gen_load_gpr(v2_t
, v2
);
23854 case OPC_EXTR_W_DSP
:
23858 tcg_gen_movi_tl(t0
, v2
);
23859 tcg_gen_movi_tl(t1
, v1
);
23860 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23863 tcg_gen_movi_tl(t0
, v2
);
23864 tcg_gen_movi_tl(t1
, v1
);
23865 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23867 case OPC_EXTR_RS_W
:
23868 tcg_gen_movi_tl(t0
, v2
);
23869 tcg_gen_movi_tl(t1
, v1
);
23870 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23873 tcg_gen_movi_tl(t0
, v2
);
23874 tcg_gen_movi_tl(t1
, v1
);
23875 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23877 case OPC_EXTRV_S_H
:
23878 tcg_gen_movi_tl(t0
, v2
);
23879 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23882 tcg_gen_movi_tl(t0
, v2
);
23883 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23885 case OPC_EXTRV_R_W
:
23886 tcg_gen_movi_tl(t0
, v2
);
23887 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23889 case OPC_EXTRV_RS_W
:
23890 tcg_gen_movi_tl(t0
, v2
);
23891 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23894 tcg_gen_movi_tl(t0
, v2
);
23895 tcg_gen_movi_tl(t1
, v1
);
23896 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23899 tcg_gen_movi_tl(t0
, v2
);
23900 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23903 tcg_gen_movi_tl(t0
, v2
);
23904 tcg_gen_movi_tl(t1
, v1
);
23905 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23908 tcg_gen_movi_tl(t0
, v2
);
23909 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23912 imm
= (ctx
->opcode
>> 20) & 0x3F;
23913 tcg_gen_movi_tl(t0
, ret
);
23914 tcg_gen_movi_tl(t1
, imm
);
23915 gen_helper_shilo(t0
, t1
, cpu_env
);
23918 tcg_gen_movi_tl(t0
, ret
);
23919 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23922 tcg_gen_movi_tl(t0
, ret
);
23923 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23926 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23927 tcg_gen_movi_tl(t0
, imm
);
23928 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23931 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23932 tcg_gen_movi_tl(t0
, imm
);
23933 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23937 #ifdef TARGET_MIPS64
23938 case OPC_DEXTR_W_DSP
:
23942 tcg_gen_movi_tl(t0
, ret
);
23943 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23947 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23948 int ac
= (ctx
->opcode
>> 11) & 0x03;
23949 tcg_gen_movi_tl(t0
, shift
);
23950 tcg_gen_movi_tl(t1
, ac
);
23951 gen_helper_dshilo(t0
, t1
, cpu_env
);
23956 int ac
= (ctx
->opcode
>> 11) & 0x03;
23957 tcg_gen_movi_tl(t0
, ac
);
23958 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23962 tcg_gen_movi_tl(t0
, v2
);
23963 tcg_gen_movi_tl(t1
, v1
);
23965 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23968 tcg_gen_movi_tl(t0
, v2
);
23969 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23972 tcg_gen_movi_tl(t0
, v2
);
23973 tcg_gen_movi_tl(t1
, v1
);
23974 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23977 tcg_gen_movi_tl(t0
, v2
);
23978 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23981 tcg_gen_movi_tl(t0
, v2
);
23982 tcg_gen_movi_tl(t1
, v1
);
23983 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23985 case OPC_DEXTR_R_L
:
23986 tcg_gen_movi_tl(t0
, v2
);
23987 tcg_gen_movi_tl(t1
, v1
);
23988 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23990 case OPC_DEXTR_RS_L
:
23991 tcg_gen_movi_tl(t0
, v2
);
23992 tcg_gen_movi_tl(t1
, v1
);
23993 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23996 tcg_gen_movi_tl(t0
, v2
);
23997 tcg_gen_movi_tl(t1
, v1
);
23998 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24000 case OPC_DEXTR_R_W
:
24001 tcg_gen_movi_tl(t0
, v2
);
24002 tcg_gen_movi_tl(t1
, v1
);
24003 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24005 case OPC_DEXTR_RS_W
:
24006 tcg_gen_movi_tl(t0
, v2
);
24007 tcg_gen_movi_tl(t1
, v1
);
24008 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24010 case OPC_DEXTR_S_H
:
24011 tcg_gen_movi_tl(t0
, v2
);
24012 tcg_gen_movi_tl(t1
, v1
);
24013 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24015 case OPC_DEXTRV_S_H
:
24016 tcg_gen_movi_tl(t0
, v2
);
24017 tcg_gen_movi_tl(t1
, v1
);
24018 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24021 tcg_gen_movi_tl(t0
, v2
);
24022 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24024 case OPC_DEXTRV_R_L
:
24025 tcg_gen_movi_tl(t0
, v2
);
24026 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24028 case OPC_DEXTRV_RS_L
:
24029 tcg_gen_movi_tl(t0
, v2
);
24030 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24033 tcg_gen_movi_tl(t0
, v2
);
24034 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24036 case OPC_DEXTRV_R_W
:
24037 tcg_gen_movi_tl(t0
, v2
);
24038 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24040 case OPC_DEXTRV_RS_W
:
24041 tcg_gen_movi_tl(t0
, v2
);
24042 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24051 tcg_temp_free(v1_t
);
24052 tcg_temp_free(v2_t
);
24055 /* End MIPSDSP functions. */
24057 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
24059 int rs
, rt
, rd
, sa
;
24062 rs
= (ctx
->opcode
>> 21) & 0x1f;
24063 rt
= (ctx
->opcode
>> 16) & 0x1f;
24064 rd
= (ctx
->opcode
>> 11) & 0x1f;
24065 sa
= (ctx
->opcode
>> 6) & 0x1f;
24067 op1
= MASK_SPECIAL(ctx
->opcode
);
24070 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
24076 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24086 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24089 MIPS_INVAL("special_r6 muldiv");
24090 generate_exception_end(ctx
, EXCP_RI
);
24096 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24100 if (rt
== 0 && sa
== 1) {
24102 * Major opcode and function field is shared with preR6 MFHI/MTHI.
24103 * We need additionally to check other fields.
24105 gen_cl(ctx
, op1
, rd
, rs
);
24107 generate_exception_end(ctx
, EXCP_RI
);
24111 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
24112 gen_helper_do_semihosting(cpu_env
);
24114 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
24115 generate_exception_end(ctx
, EXCP_RI
);
24117 generate_exception_end(ctx
, EXCP_DBp
);
24121 #if defined(TARGET_MIPS64)
24123 check_mips_64(ctx
);
24124 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
24128 if (rt
== 0 && sa
== 1) {
24130 * Major opcode and function field is shared with preR6 MFHI/MTHI.
24131 * We need additionally to check other fields.
24133 check_mips_64(ctx
);
24134 gen_cl(ctx
, op1
, rd
, rs
);
24136 generate_exception_end(ctx
, EXCP_RI
);
24144 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24154 check_mips_64(ctx
);
24155 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24158 MIPS_INVAL("special_r6 muldiv");
24159 generate_exception_end(ctx
, EXCP_RI
);
24164 default: /* Invalid */
24165 MIPS_INVAL("special_r6");
24166 generate_exception_end(ctx
, EXCP_RI
);
24171 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
24173 int rs
= extract32(ctx
->opcode
, 21, 5);
24174 int rt
= extract32(ctx
->opcode
, 16, 5);
24175 int rd
= extract32(ctx
->opcode
, 11, 5);
24176 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
24179 case OPC_MOVN
: /* Conditional move */
24181 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24183 case OPC_MFHI
: /* Move from HI/LO */
24185 gen_HILO(ctx
, op1
, 0, rd
);
24188 case OPC_MTLO
: /* Move to HI/LO */
24189 gen_HILO(ctx
, op1
, 0, rs
);
24193 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
24197 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24199 #if defined(TARGET_MIPS64)
24204 check_insn_opc_user_only(ctx
, INSN_R5900
);
24205 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24209 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
24211 default: /* Invalid */
24212 MIPS_INVAL("special_tx79");
24213 generate_exception_end(ctx
, EXCP_RI
);
24218 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24220 int rs
, rt
, rd
, sa
;
24223 rs
= (ctx
->opcode
>> 21) & 0x1f;
24224 rt
= (ctx
->opcode
>> 16) & 0x1f;
24225 rd
= (ctx
->opcode
>> 11) & 0x1f;
24226 sa
= (ctx
->opcode
>> 6) & 0x1f;
24228 op1
= MASK_SPECIAL(ctx
->opcode
);
24230 case OPC_MOVN
: /* Conditional move */
24232 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
24233 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
24234 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24236 case OPC_MFHI
: /* Move from HI/LO */
24238 gen_HILO(ctx
, op1
, rs
& 3, rd
);
24241 case OPC_MTLO
: /* Move to HI/LO */
24242 gen_HILO(ctx
, op1
, rd
& 3, rs
);
24245 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
24246 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
24247 check_cp1_enabled(ctx
);
24248 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
24249 (ctx
->opcode
>> 16) & 1);
24251 generate_exception_err(ctx
, EXCP_CpU
, 1);
24257 check_insn(ctx
, INSN_VR54XX
);
24258 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
24259 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
24261 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24266 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24268 #if defined(TARGET_MIPS64)
24273 check_insn(ctx
, ISA_MIPS3
);
24274 check_mips_64(ctx
);
24275 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24279 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24282 #ifdef MIPS_STRICT_STANDARD
24283 MIPS_INVAL("SPIM");
24284 generate_exception_end(ctx
, EXCP_RI
);
24286 /* Implemented as RI exception for now. */
24287 MIPS_INVAL("spim (unofficial)");
24288 generate_exception_end(ctx
, EXCP_RI
);
24291 default: /* Invalid */
24292 MIPS_INVAL("special_legacy");
24293 generate_exception_end(ctx
, EXCP_RI
);
24298 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
24300 int rs
, rt
, rd
, sa
;
24303 rs
= (ctx
->opcode
>> 21) & 0x1f;
24304 rt
= (ctx
->opcode
>> 16) & 0x1f;
24305 rd
= (ctx
->opcode
>> 11) & 0x1f;
24306 sa
= (ctx
->opcode
>> 6) & 0x1f;
24308 op1
= MASK_SPECIAL(ctx
->opcode
);
24310 case OPC_SLL
: /* Shift with immediate */
24311 if (sa
== 5 && rd
== 0 &&
24312 rs
== 0 && rt
== 0) { /* PAUSE */
24313 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
24314 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
24315 generate_exception_end(ctx
, EXCP_RI
);
24321 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24324 switch ((ctx
->opcode
>> 21) & 0x1f) {
24326 /* rotr is decoded as srl on non-R2 CPUs */
24327 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24332 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24335 generate_exception_end(ctx
, EXCP_RI
);
24343 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24345 case OPC_SLLV
: /* Shifts */
24347 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24350 switch ((ctx
->opcode
>> 6) & 0x1f) {
24352 /* rotrv is decoded as srlv on non-R2 CPUs */
24353 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24358 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24361 generate_exception_end(ctx
, EXCP_RI
);
24365 case OPC_SLT
: /* Set on less than */
24367 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24369 case OPC_AND
: /* Logic*/
24373 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24376 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24378 case OPC_TGE
: /* Traps */
24384 check_insn(ctx
, ISA_MIPS2
);
24385 gen_trap(ctx
, op1
, rs
, rt
, -1);
24387 case OPC_LSA
: /* OPC_PMON */
24388 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24389 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24390 decode_opc_special_r6(env
, ctx
);
24392 /* Pmon entry point, also R4010 selsl */
24393 #ifdef MIPS_STRICT_STANDARD
24394 MIPS_INVAL("PMON / selsl");
24395 generate_exception_end(ctx
, EXCP_RI
);
24397 gen_helper_0e0i(pmon
, sa
);
24402 generate_exception_end(ctx
, EXCP_SYSCALL
);
24405 generate_exception_end(ctx
, EXCP_BREAK
);
24408 check_insn(ctx
, ISA_MIPS2
);
24409 gen_sync(extract32(ctx
->opcode
, 6, 5));
24412 #if defined(TARGET_MIPS64)
24413 /* MIPS64 specific opcodes */
24418 check_insn(ctx
, ISA_MIPS3
);
24419 check_mips_64(ctx
);
24420 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24423 switch ((ctx
->opcode
>> 21) & 0x1f) {
24425 /* drotr is decoded as dsrl on non-R2 CPUs */
24426 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24431 check_insn(ctx
, ISA_MIPS3
);
24432 check_mips_64(ctx
);
24433 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24436 generate_exception_end(ctx
, EXCP_RI
);
24441 switch ((ctx
->opcode
>> 21) & 0x1f) {
24443 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24444 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24449 check_insn(ctx
, ISA_MIPS3
);
24450 check_mips_64(ctx
);
24451 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24454 generate_exception_end(ctx
, EXCP_RI
);
24462 check_insn(ctx
, ISA_MIPS3
);
24463 check_mips_64(ctx
);
24464 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24468 check_insn(ctx
, ISA_MIPS3
);
24469 check_mips_64(ctx
);
24470 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24473 switch ((ctx
->opcode
>> 6) & 0x1f) {
24475 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24476 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
24481 check_insn(ctx
, ISA_MIPS3
);
24482 check_mips_64(ctx
);
24483 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24486 generate_exception_end(ctx
, EXCP_RI
);
24491 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
24492 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
24493 decode_opc_special_r6(env
, ctx
);
24498 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
24499 decode_opc_special_r6(env
, ctx
);
24500 } else if (ctx
->insn_flags
& INSN_R5900
) {
24501 decode_opc_special_tx79(env
, ctx
);
24503 decode_opc_special_legacy(env
, ctx
);
24509 #if defined(TARGET_MIPS64)
24513 * MMI (MultiMedia Interface) ASE instructions
24514 * ===========================================
24518 * MMI instructions category: data communication
24519 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24521 * PCPYH PEXCH PEXTLB PINTH PPACB PEXT5 PREVH
24522 * PCPYLD PEXCW PEXTLH PINTEH PPACH PPAC5 PROT3W
24523 * PCPYUD PEXEH PEXTLW PPACW
24532 * Parallel Copy Halfword
24534 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24535 * +-----------+---------+---------+---------+---------+-----------+
24536 * | MMI |0 0 0 0 0| rt | rd | PCPYH | MMI3 |
24537 * +-----------+---------+---------+---------+---------+-----------+
24539 static void gen_mmi_pcpyh(DisasContext
*ctx
)
24541 uint32_t pd
, rt
, rd
;
24544 opcode
= ctx
->opcode
;
24546 pd
= extract32(opcode
, 21, 5);
24547 rt
= extract32(opcode
, 16, 5);
24548 rd
= extract32(opcode
, 11, 5);
24550 if (unlikely(pd
!= 0)) {
24551 generate_exception_end(ctx
, EXCP_RI
);
24552 } else if (rd
== 0) {
24554 } else if (rt
== 0) {
24555 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24556 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24558 TCGv_i64 t0
= tcg_temp_new();
24559 TCGv_i64 t1
= tcg_temp_new();
24560 uint64_t mask
= (1ULL << 16) - 1;
24562 tcg_gen_andi_i64(t0
, cpu_gpr
[rt
], mask
);
24563 tcg_gen_movi_i64(t1
, 0);
24564 tcg_gen_or_i64(t1
, t0
, t1
);
24565 tcg_gen_shli_i64(t0
, t0
, 16);
24566 tcg_gen_or_i64(t1
, t0
, t1
);
24567 tcg_gen_shli_i64(t0
, t0
, 16);
24568 tcg_gen_or_i64(t1
, t0
, t1
);
24569 tcg_gen_shli_i64(t0
, t0
, 16);
24570 tcg_gen_or_i64(t1
, t0
, t1
);
24572 tcg_gen_mov_i64(cpu_gpr
[rd
], t1
);
24574 tcg_gen_andi_i64(t0
, cpu_mmr
[rt
], mask
);
24575 tcg_gen_movi_i64(t1
, 0);
24576 tcg_gen_or_i64(t1
, t0
, t1
);
24577 tcg_gen_shli_i64(t0
, t0
, 16);
24578 tcg_gen_or_i64(t1
, t0
, t1
);
24579 tcg_gen_shli_i64(t0
, t0
, 16);
24580 tcg_gen_or_i64(t1
, t0
, t1
);
24581 tcg_gen_shli_i64(t0
, t0
, 16);
24582 tcg_gen_or_i64(t1
, t0
, t1
);
24584 tcg_gen_mov_i64(cpu_mmr
[rd
], t1
);
24592 * PCPYLD rd, rs, rt
24594 * Parallel Copy Lower Doubleword
24596 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24597 * +-----------+---------+---------+---------+---------+-----------+
24598 * | MMI | rs | rt | rd | PCPYLD | MMI2 |
24599 * +-----------+---------+---------+---------+---------+-----------+
24601 static void gen_mmi_pcpyld(DisasContext
*ctx
)
24603 uint32_t rs
, rt
, rd
;
24606 opcode
= ctx
->opcode
;
24608 rs
= extract32(opcode
, 21, 5);
24609 rt
= extract32(opcode
, 16, 5);
24610 rd
= extract32(opcode
, 11, 5);
24616 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24618 tcg_gen_mov_i64(cpu_mmr
[rd
], cpu_gpr
[rs
]);
24621 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24624 tcg_gen_mov_i64(cpu_gpr
[rd
], cpu_gpr
[rt
]);
24631 * PCPYUD rd, rs, rt
24633 * Parallel Copy Upper Doubleword
24635 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24636 * +-----------+---------+---------+---------+---------+-----------+
24637 * | MMI | rs | rt | rd | PCPYUD | MMI3 |
24638 * +-----------+---------+---------+---------+---------+-----------+
24640 static void gen_mmi_pcpyud(DisasContext
*ctx
)
24642 uint32_t rs
, rt
, rd
;
24645 opcode
= ctx
->opcode
;
24647 rs
= extract32(opcode
, 21, 5);
24648 rt
= extract32(opcode
, 16, 5);
24649 rd
= extract32(opcode
, 11, 5);
24655 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24657 tcg_gen_mov_i64(cpu_gpr
[rd
], cpu_mmr
[rs
]);
24660 tcg_gen_movi_i64(cpu_mmr
[rd
], 0);
24663 tcg_gen_mov_i64(cpu_mmr
[rd
], cpu_mmr
[rt
]);
24672 #if !defined(TARGET_MIPS64)
24674 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
24675 #define MXU_APTN1_A 0
24676 #define MXU_APTN1_S 1
24678 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
24679 #define MXU_APTN2_AA 0
24680 #define MXU_APTN2_AS 1
24681 #define MXU_APTN2_SA 2
24682 #define MXU_APTN2_SS 3
24684 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
24685 #define MXU_EPTN2_AA 0
24686 #define MXU_EPTN2_AS 1
24687 #define MXU_EPTN2_SA 2
24688 #define MXU_EPTN2_SS 3
24690 /* MXU operand getting pattern 'optn2' */
24691 #define MXU_OPTN2_PTN0 0
24692 #define MXU_OPTN2_PTN1 1
24693 #define MXU_OPTN2_PTN2 2
24694 #define MXU_OPTN2_PTN3 3
24695 /* alternative naming scheme for 'optn2' */
24696 #define MXU_OPTN2_WW 0
24697 #define MXU_OPTN2_LW 1
24698 #define MXU_OPTN2_HW 2
24699 #define MXU_OPTN2_XW 3
24701 /* MXU operand getting pattern 'optn3' */
24702 #define MXU_OPTN3_PTN0 0
24703 #define MXU_OPTN3_PTN1 1
24704 #define MXU_OPTN3_PTN2 2
24705 #define MXU_OPTN3_PTN3 3
24706 #define MXU_OPTN3_PTN4 4
24707 #define MXU_OPTN3_PTN5 5
24708 #define MXU_OPTN3_PTN6 6
24709 #define MXU_OPTN3_PTN7 7
24713 * S32I2M XRa, rb - Register move from GRF to XRF
24715 static void gen_mxu_s32i2m(DisasContext
*ctx
)
24720 t0
= tcg_temp_new();
24722 XRa
= extract32(ctx
->opcode
, 6, 5);
24723 Rb
= extract32(ctx
->opcode
, 16, 5);
24725 gen_load_gpr(t0
, Rb
);
24727 gen_store_mxu_gpr(t0
, XRa
);
24728 } else if (XRa
== 16) {
24729 gen_store_mxu_cr(t0
);
24736 * S32M2I XRa, rb - Register move from XRF to GRF
24738 static void gen_mxu_s32m2i(DisasContext
*ctx
)
24743 t0
= tcg_temp_new();
24745 XRa
= extract32(ctx
->opcode
, 6, 5);
24746 Rb
= extract32(ctx
->opcode
, 16, 5);
24749 gen_load_mxu_gpr(t0
, XRa
);
24750 } else if (XRa
== 16) {
24751 gen_load_mxu_cr(t0
);
24754 gen_store_gpr(t0
, Rb
);
24760 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
24762 static void gen_mxu_s8ldd(DisasContext
*ctx
)
24765 uint32_t XRa
, Rb
, s8
, optn3
;
24767 t0
= tcg_temp_new();
24768 t1
= tcg_temp_new();
24770 XRa
= extract32(ctx
->opcode
, 6, 4);
24771 s8
= extract32(ctx
->opcode
, 10, 8);
24772 optn3
= extract32(ctx
->opcode
, 18, 3);
24773 Rb
= extract32(ctx
->opcode
, 21, 5);
24775 gen_load_gpr(t0
, Rb
);
24776 tcg_gen_addi_tl(t0
, t0
, (int8_t)s8
);
24779 /* XRa[7:0] = tmp8 */
24780 case MXU_OPTN3_PTN0
:
24781 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24782 gen_load_mxu_gpr(t0
, XRa
);
24783 tcg_gen_deposit_tl(t0
, t0
, t1
, 0, 8);
24785 /* XRa[15:8] = tmp8 */
24786 case MXU_OPTN3_PTN1
:
24787 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24788 gen_load_mxu_gpr(t0
, XRa
);
24789 tcg_gen_deposit_tl(t0
, t0
, t1
, 8, 8);
24791 /* XRa[23:16] = tmp8 */
24792 case MXU_OPTN3_PTN2
:
24793 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24794 gen_load_mxu_gpr(t0
, XRa
);
24795 tcg_gen_deposit_tl(t0
, t0
, t1
, 16, 8);
24797 /* XRa[31:24] = tmp8 */
24798 case MXU_OPTN3_PTN3
:
24799 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24800 gen_load_mxu_gpr(t0
, XRa
);
24801 tcg_gen_deposit_tl(t0
, t0
, t1
, 24, 8);
24803 /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
24804 case MXU_OPTN3_PTN4
:
24805 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24806 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24808 /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
24809 case MXU_OPTN3_PTN5
:
24810 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24811 tcg_gen_shli_tl(t1
, t1
, 8);
24812 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24814 /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
24815 case MXU_OPTN3_PTN6
:
24816 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
24817 tcg_gen_mov_tl(t0
, t1
);
24818 tcg_gen_andi_tl(t0
, t0
, 0xFF00FFFF);
24819 tcg_gen_shli_tl(t1
, t1
, 16);
24820 tcg_gen_or_tl(t0
, t0
, t1
);
24822 /* XRa = {tmp8, tmp8, tmp8, tmp8} */
24823 case MXU_OPTN3_PTN7
:
24824 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24825 tcg_gen_deposit_tl(t1
, t1
, t1
, 8, 8);
24826 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24830 gen_store_mxu_gpr(t0
, XRa
);
24837 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
24839 static void gen_mxu_d16mul(DisasContext
*ctx
)
24841 TCGv t0
, t1
, t2
, t3
;
24842 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
;
24844 t0
= tcg_temp_new();
24845 t1
= tcg_temp_new();
24846 t2
= tcg_temp_new();
24847 t3
= tcg_temp_new();
24849 XRa
= extract32(ctx
->opcode
, 6, 4);
24850 XRb
= extract32(ctx
->opcode
, 10, 4);
24851 XRc
= extract32(ctx
->opcode
, 14, 4);
24852 XRd
= extract32(ctx
->opcode
, 18, 4);
24853 optn2
= extract32(ctx
->opcode
, 22, 2);
24855 gen_load_mxu_gpr(t1
, XRb
);
24856 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24857 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24858 gen_load_mxu_gpr(t3
, XRc
);
24859 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24860 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24863 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24864 tcg_gen_mul_tl(t3
, t1
, t3
);
24865 tcg_gen_mul_tl(t2
, t0
, t2
);
24867 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24868 tcg_gen_mul_tl(t3
, t0
, t3
);
24869 tcg_gen_mul_tl(t2
, t0
, t2
);
24871 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24872 tcg_gen_mul_tl(t3
, t1
, t3
);
24873 tcg_gen_mul_tl(t2
, t1
, t2
);
24875 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24876 tcg_gen_mul_tl(t3
, t0
, t3
);
24877 tcg_gen_mul_tl(t2
, t1
, t2
);
24880 gen_store_mxu_gpr(t3
, XRa
);
24881 gen_store_mxu_gpr(t2
, XRd
);
24890 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
24893 static void gen_mxu_d16mac(DisasContext
*ctx
)
24895 TCGv t0
, t1
, t2
, t3
;
24896 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
, aptn2
;
24898 t0
= tcg_temp_new();
24899 t1
= tcg_temp_new();
24900 t2
= tcg_temp_new();
24901 t3
= tcg_temp_new();
24903 XRa
= extract32(ctx
->opcode
, 6, 4);
24904 XRb
= extract32(ctx
->opcode
, 10, 4);
24905 XRc
= extract32(ctx
->opcode
, 14, 4);
24906 XRd
= extract32(ctx
->opcode
, 18, 4);
24907 optn2
= extract32(ctx
->opcode
, 22, 2);
24908 aptn2
= extract32(ctx
->opcode
, 24, 2);
24910 gen_load_mxu_gpr(t1
, XRb
);
24911 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24912 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24914 gen_load_mxu_gpr(t3
, XRc
);
24915 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24916 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24919 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24920 tcg_gen_mul_tl(t3
, t1
, t3
);
24921 tcg_gen_mul_tl(t2
, t0
, t2
);
24923 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24924 tcg_gen_mul_tl(t3
, t0
, t3
);
24925 tcg_gen_mul_tl(t2
, t0
, t2
);
24927 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24928 tcg_gen_mul_tl(t3
, t1
, t3
);
24929 tcg_gen_mul_tl(t2
, t1
, t2
);
24931 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24932 tcg_gen_mul_tl(t3
, t0
, t3
);
24933 tcg_gen_mul_tl(t2
, t1
, t2
);
24936 gen_load_mxu_gpr(t0
, XRa
);
24937 gen_load_mxu_gpr(t1
, XRd
);
24941 tcg_gen_add_tl(t3
, t0
, t3
);
24942 tcg_gen_add_tl(t2
, t1
, t2
);
24945 tcg_gen_add_tl(t3
, t0
, t3
);
24946 tcg_gen_sub_tl(t2
, t1
, t2
);
24949 tcg_gen_sub_tl(t3
, t0
, t3
);
24950 tcg_gen_add_tl(t2
, t1
, t2
);
24953 tcg_gen_sub_tl(t3
, t0
, t3
);
24954 tcg_gen_sub_tl(t2
, t1
, t2
);
24957 gen_store_mxu_gpr(t3
, XRa
);
24958 gen_store_mxu_gpr(t2
, XRd
);
24967 * Q8MUL XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
24968 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
24970 static void gen_mxu_q8mul_q8mulsu(DisasContext
*ctx
)
24972 TCGv t0
, t1
, t2
, t3
, t4
, t5
, t6
, t7
;
24973 uint32_t XRa
, XRb
, XRc
, XRd
, sel
;
24975 t0
= tcg_temp_new();
24976 t1
= tcg_temp_new();
24977 t2
= tcg_temp_new();
24978 t3
= tcg_temp_new();
24979 t4
= tcg_temp_new();
24980 t5
= tcg_temp_new();
24981 t6
= tcg_temp_new();
24982 t7
= tcg_temp_new();
24984 XRa
= extract32(ctx
->opcode
, 6, 4);
24985 XRb
= extract32(ctx
->opcode
, 10, 4);
24986 XRc
= extract32(ctx
->opcode
, 14, 4);
24987 XRd
= extract32(ctx
->opcode
, 18, 4);
24988 sel
= extract32(ctx
->opcode
, 22, 2);
24990 gen_load_mxu_gpr(t3
, XRb
);
24991 gen_load_mxu_gpr(t7
, XRc
);
24995 tcg_gen_ext8s_tl(t0
, t3
);
24996 tcg_gen_shri_tl(t3
, t3
, 8);
24997 tcg_gen_ext8s_tl(t1
, t3
);
24998 tcg_gen_shri_tl(t3
, t3
, 8);
24999 tcg_gen_ext8s_tl(t2
, t3
);
25000 tcg_gen_shri_tl(t3
, t3
, 8);
25001 tcg_gen_ext8s_tl(t3
, t3
);
25004 tcg_gen_ext8u_tl(t0
, t3
);
25005 tcg_gen_shri_tl(t3
, t3
, 8);
25006 tcg_gen_ext8u_tl(t1
, t3
);
25007 tcg_gen_shri_tl(t3
, t3
, 8);
25008 tcg_gen_ext8u_tl(t2
, t3
);
25009 tcg_gen_shri_tl(t3
, t3
, 8);
25010 tcg_gen_ext8u_tl(t3
, t3
);
25013 tcg_gen_ext8u_tl(t4
, t7
);
25014 tcg_gen_shri_tl(t7
, t7
, 8);
25015 tcg_gen_ext8u_tl(t5
, t7
);
25016 tcg_gen_shri_tl(t7
, t7
, 8);
25017 tcg_gen_ext8u_tl(t6
, t7
);
25018 tcg_gen_shri_tl(t7
, t7
, 8);
25019 tcg_gen_ext8u_tl(t7
, t7
);
25021 tcg_gen_mul_tl(t0
, t0
, t4
);
25022 tcg_gen_mul_tl(t1
, t1
, t5
);
25023 tcg_gen_mul_tl(t2
, t2
, t6
);
25024 tcg_gen_mul_tl(t3
, t3
, t7
);
25026 tcg_gen_andi_tl(t0
, t0
, 0xFFFF);
25027 tcg_gen_andi_tl(t1
, t1
, 0xFFFF);
25028 tcg_gen_andi_tl(t2
, t2
, 0xFFFF);
25029 tcg_gen_andi_tl(t3
, t3
, 0xFFFF);
25031 tcg_gen_shli_tl(t1
, t1
, 16);
25032 tcg_gen_shli_tl(t3
, t3
, 16);
25034 tcg_gen_or_tl(t0
, t0
, t1
);
25035 tcg_gen_or_tl(t1
, t2
, t3
);
25037 gen_store_mxu_gpr(t0
, XRd
);
25038 gen_store_mxu_gpr(t1
, XRa
);
25051 * S32LDD XRa, Rb, S12 - Load a word from memory to XRF
25052 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
25054 static void gen_mxu_s32ldd_s32lddr(DisasContext
*ctx
)
25057 uint32_t XRa
, Rb
, s12
, sel
;
25059 t0
= tcg_temp_new();
25060 t1
= tcg_temp_new();
25062 XRa
= extract32(ctx
->opcode
, 6, 4);
25063 s12
= extract32(ctx
->opcode
, 10, 10);
25064 sel
= extract32(ctx
->opcode
, 20, 1);
25065 Rb
= extract32(ctx
->opcode
, 21, 5);
25067 gen_load_gpr(t0
, Rb
);
25069 tcg_gen_movi_tl(t1
, s12
);
25070 tcg_gen_shli_tl(t1
, t1
, 2);
25072 tcg_gen_ori_tl(t1
, t1
, 0xFFFFF000);
25074 tcg_gen_add_tl(t1
, t0
, t1
);
25075 tcg_gen_qemu_ld_tl(t1
, t1
, ctx
->mem_idx
, MO_SL
);
25079 tcg_gen_bswap32_tl(t1
, t1
);
25081 gen_store_mxu_gpr(t1
, XRa
);
25089 * MXU instruction category: logic
25090 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25092 * S32NOR S32AND S32OR S32XOR
25096 * S32NOR XRa, XRb, XRc
25097 * Update XRa with the result of logical bitwise 'nor' operation
25098 * applied to the content of XRb and XRc.
25100 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25101 * +-----------+---------+-----+-------+-------+-------+-----------+
25102 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25103 * +-----------+---------+-----+-------+-------+-------+-----------+
25105 static void gen_mxu_S32NOR(DisasContext
*ctx
)
25107 uint32_t pad
, XRc
, XRb
, XRa
;
25109 pad
= extract32(ctx
->opcode
, 21, 5);
25110 XRc
= extract32(ctx
->opcode
, 14, 4);
25111 XRb
= extract32(ctx
->opcode
, 10, 4);
25112 XRa
= extract32(ctx
->opcode
, 6, 4);
25114 if (unlikely(pad
!= 0)) {
25115 /* opcode padding incorrect -> do nothing */
25116 } else if (unlikely(XRa
== 0)) {
25117 /* destination is zero register -> do nothing */
25118 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25119 /* both operands zero registers -> just set destination to all 1s */
25120 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0xFFFFFFFF);
25121 } else if (unlikely(XRb
== 0)) {
25122 /* XRb zero register -> just set destination to the negation of XRc */
25123 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25124 } else if (unlikely(XRc
== 0)) {
25125 /* XRa zero register -> just set destination to the negation of XRb */
25126 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25127 } else if (unlikely(XRb
== XRc
)) {
25128 /* both operands same -> just set destination to the negation of XRb */
25129 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25131 /* the most general case */
25132 tcg_gen_nor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25137 * S32AND XRa, XRb, XRc
25138 * Update XRa with the result of logical bitwise 'and' operation
25139 * applied to the content of XRb and XRc.
25141 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25142 * +-----------+---------+-----+-------+-------+-------+-----------+
25143 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25144 * +-----------+---------+-----+-------+-------+-------+-----------+
25146 static void gen_mxu_S32AND(DisasContext
*ctx
)
25148 uint32_t pad
, XRc
, XRb
, XRa
;
25150 pad
= extract32(ctx
->opcode
, 21, 5);
25151 XRc
= extract32(ctx
->opcode
, 14, 4);
25152 XRb
= extract32(ctx
->opcode
, 10, 4);
25153 XRa
= extract32(ctx
->opcode
, 6, 4);
25155 if (unlikely(pad
!= 0)) {
25156 /* opcode padding incorrect -> do nothing */
25157 } else if (unlikely(XRa
== 0)) {
25158 /* destination is zero register -> do nothing */
25159 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25160 /* one of operands zero register -> just set destination to all 0s */
25161 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25162 } else if (unlikely(XRb
== XRc
)) {
25163 /* both operands same -> just set destination to one of them */
25164 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25166 /* the most general case */
25167 tcg_gen_and_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25172 * S32OR XRa, XRb, XRc
25173 * Update XRa with the result of logical bitwise 'or' operation
25174 * applied to the content of XRb and XRc.
25176 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25177 * +-----------+---------+-----+-------+-------+-------+-----------+
25178 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25179 * +-----------+---------+-----+-------+-------+-------+-----------+
25181 static void gen_mxu_S32OR(DisasContext
*ctx
)
25183 uint32_t pad
, XRc
, XRb
, XRa
;
25185 pad
= extract32(ctx
->opcode
, 21, 5);
25186 XRc
= extract32(ctx
->opcode
, 14, 4);
25187 XRb
= extract32(ctx
->opcode
, 10, 4);
25188 XRa
= extract32(ctx
->opcode
, 6, 4);
25190 if (unlikely(pad
!= 0)) {
25191 /* opcode padding incorrect -> do nothing */
25192 } else if (unlikely(XRa
== 0)) {
25193 /* destination is zero register -> do nothing */
25194 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25195 /* both operands zero registers -> just set destination to all 0s */
25196 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25197 } else if (unlikely(XRb
== 0)) {
25198 /* XRb zero register -> just set destination to the content of XRc */
25199 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25200 } else if (unlikely(XRc
== 0)) {
25201 /* XRc zero register -> just set destination to the content of XRb */
25202 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25203 } else if (unlikely(XRb
== XRc
)) {
25204 /* both operands same -> just set destination to one of them */
25205 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25207 /* the most general case */
25208 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25213 * S32XOR XRa, XRb, XRc
25214 * Update XRa with the result of logical bitwise 'xor' operation
25215 * applied to the content of XRb and XRc.
25217 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25218 * +-----------+---------+-----+-------+-------+-------+-----------+
25219 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL16|
25220 * +-----------+---------+-----+-------+-------+-------+-----------+
25222 static void gen_mxu_S32XOR(DisasContext
*ctx
)
25224 uint32_t pad
, XRc
, XRb
, XRa
;
25226 pad
= extract32(ctx
->opcode
, 21, 5);
25227 XRc
= extract32(ctx
->opcode
, 14, 4);
25228 XRb
= extract32(ctx
->opcode
, 10, 4);
25229 XRa
= extract32(ctx
->opcode
, 6, 4);
25231 if (unlikely(pad
!= 0)) {
25232 /* opcode padding incorrect -> do nothing */
25233 } else if (unlikely(XRa
== 0)) {
25234 /* destination is zero register -> do nothing */
25235 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25236 /* both operands zero registers -> just set destination to all 0s */
25237 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25238 } else if (unlikely(XRb
== 0)) {
25239 /* XRb zero register -> just set destination to the content of XRc */
25240 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25241 } else if (unlikely(XRc
== 0)) {
25242 /* XRc zero register -> just set destination to the content of XRb */
25243 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25244 } else if (unlikely(XRb
== XRc
)) {
25245 /* both operands same -> just set destination to all 0s */
25246 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25248 /* the most general case */
25249 tcg_gen_xor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25255 * MXU instruction category max/min
25256 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25258 * S32MAX D16MAX Q8MAX
25259 * S32MIN D16MIN Q8MIN
25263 * S32MAX XRa, XRb, XRc
25264 * Update XRa with the maximum of signed 32-bit integers contained
25267 * S32MIN XRa, XRb, XRc
25268 * Update XRa with the minimum of signed 32-bit integers contained
25271 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25272 * +-----------+---------+-----+-------+-------+-------+-----------+
25273 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25274 * +-----------+---------+-----+-------+-------+-------+-----------+
25276 static void gen_mxu_S32MAX_S32MIN(DisasContext
*ctx
)
25278 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25280 pad
= extract32(ctx
->opcode
, 21, 5);
25281 opc
= extract32(ctx
->opcode
, 18, 3);
25282 XRc
= extract32(ctx
->opcode
, 14, 4);
25283 XRb
= extract32(ctx
->opcode
, 10, 4);
25284 XRa
= extract32(ctx
->opcode
, 6, 4);
25286 if (unlikely(pad
!= 0)) {
25287 /* opcode padding incorrect -> do nothing */
25288 } else if (unlikely(XRa
== 0)) {
25289 /* destination is zero register -> do nothing */
25290 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25291 /* both operands zero registers -> just set destination to zero */
25292 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25293 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25294 /* exactly one operand is zero register - find which one is not...*/
25295 uint32_t XRx
= XRb
? XRb
: XRc
;
25296 /* ...and do max/min operation with one operand 0 */
25297 if (opc
== OPC_MXU_S32MAX
) {
25298 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25300 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25302 } else if (unlikely(XRb
== XRc
)) {
25303 /* both operands same -> just set destination to one of them */
25304 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25306 /* the most general case */
25307 if (opc
== OPC_MXU_S32MAX
) {
25308 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25311 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25319 * Update XRa with the 16-bit-wise maximums of signed integers
25320 * contained in XRb and XRc.
25323 * Update XRa with the 16-bit-wise minimums of signed integers
25324 * contained in XRb and XRc.
25326 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25327 * +-----------+---------+-----+-------+-------+-------+-----------+
25328 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25329 * +-----------+---------+-----+-------+-------+-------+-----------+
25331 static void gen_mxu_D16MAX_D16MIN(DisasContext
*ctx
)
25333 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25335 pad
= extract32(ctx
->opcode
, 21, 5);
25336 opc
= extract32(ctx
->opcode
, 18, 3);
25337 XRc
= extract32(ctx
->opcode
, 14, 4);
25338 XRb
= extract32(ctx
->opcode
, 10, 4);
25339 XRa
= extract32(ctx
->opcode
, 6, 4);
25341 if (unlikely(pad
!= 0)) {
25342 /* opcode padding incorrect -> do nothing */
25343 } else if (unlikely(XRc
== 0)) {
25344 /* destination is zero register -> do nothing */
25345 } else if (unlikely((XRb
== 0) && (XRa
== 0))) {
25346 /* both operands zero registers -> just set destination to zero */
25347 tcg_gen_movi_i32(mxu_gpr
[XRc
- 1], 0);
25348 } else if (unlikely((XRb
== 0) || (XRa
== 0))) {
25349 /* exactly one operand is zero register - find which one is not...*/
25350 uint32_t XRx
= XRb
? XRb
: XRc
;
25351 /* ...and do half-word-wise max/min with one operand 0 */
25352 TCGv_i32 t0
= tcg_temp_new();
25353 TCGv_i32 t1
= tcg_const_i32(0);
25355 /* the left half-word first */
25356 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFFFF0000);
25357 if (opc
== OPC_MXU_D16MAX
) {
25358 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25360 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25363 /* the right half-word */
25364 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0x0000FFFF);
25365 /* move half-words to the leftmost position */
25366 tcg_gen_shli_i32(t0
, t0
, 16);
25367 /* t0 will be max/min of t0 and t1 */
25368 if (opc
== OPC_MXU_D16MAX
) {
25369 tcg_gen_smax_i32(t0
, t0
, t1
);
25371 tcg_gen_smin_i32(t0
, t0
, t1
);
25373 /* return resulting half-words to its original position */
25374 tcg_gen_shri_i32(t0
, t0
, 16);
25375 /* finaly update the destination */
25376 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25380 } else if (unlikely(XRb
== XRc
)) {
25381 /* both operands same -> just set destination to one of them */
25382 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25384 /* the most general case */
25385 TCGv_i32 t0
= tcg_temp_new();
25386 TCGv_i32 t1
= tcg_temp_new();
25388 /* the left half-word first */
25389 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFFFF0000);
25390 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25391 if (opc
== OPC_MXU_D16MAX
) {
25392 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25394 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25397 /* the right half-word */
25398 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25399 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0x0000FFFF);
25400 /* move half-words to the leftmost position */
25401 tcg_gen_shli_i32(t0
, t0
, 16);
25402 tcg_gen_shli_i32(t1
, t1
, 16);
25403 /* t0 will be max/min of t0 and t1 */
25404 if (opc
== OPC_MXU_D16MAX
) {
25405 tcg_gen_smax_i32(t0
, t0
, t1
);
25407 tcg_gen_smin_i32(t0
, t0
, t1
);
25409 /* return resulting half-words to its original position */
25410 tcg_gen_shri_i32(t0
, t0
, 16);
25411 /* finaly update the destination */
25412 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25421 * Update XRa with the 8-bit-wise maximums of signed integers
25422 * contained in XRb and XRc.
25425 * Update XRa with the 8-bit-wise minimums of signed integers
25426 * contained in XRb and XRc.
25428 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25429 * +-----------+---------+-----+-------+-------+-------+-----------+
25430 * | SPECIAL2 |0 0 0 0 0| opc | XRc | XRb | XRa |MXU__POOL00|
25431 * +-----------+---------+-----+-------+-------+-------+-----------+
25433 static void gen_mxu_Q8MAX_Q8MIN(DisasContext
*ctx
)
25435 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25437 pad
= extract32(ctx
->opcode
, 21, 5);
25438 opc
= extract32(ctx
->opcode
, 18, 3);
25439 XRc
= extract32(ctx
->opcode
, 14, 4);
25440 XRb
= extract32(ctx
->opcode
, 10, 4);
25441 XRa
= extract32(ctx
->opcode
, 6, 4);
25443 if (unlikely(pad
!= 0)) {
25444 /* opcode padding incorrect -> do nothing */
25445 } else if (unlikely(XRa
== 0)) {
25446 /* destination is zero register -> do nothing */
25447 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25448 /* both operands zero registers -> just set destination to zero */
25449 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25450 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25451 /* exactly one operand is zero register - make it be the first...*/
25452 uint32_t XRx
= XRb
? XRb
: XRc
;
25453 /* ...and do byte-wise max/min with one operand 0 */
25454 TCGv_i32 t0
= tcg_temp_new();
25455 TCGv_i32 t1
= tcg_const_i32(0);
25458 /* the leftmost byte (byte 3) first */
25459 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF000000);
25460 if (opc
== OPC_MXU_Q8MAX
) {
25461 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25463 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25466 /* bytes 2, 1, 0 */
25467 for (i
= 2; i
>= 0; i
--) {
25468 /* extract the byte */
25469 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFF << (8 * i
));
25470 /* move the byte to the leftmost position */
25471 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25472 /* t0 will be max/min of t0 and t1 */
25473 if (opc
== OPC_MXU_Q8MAX
) {
25474 tcg_gen_smax_i32(t0
, t0
, t1
);
25476 tcg_gen_smin_i32(t0
, t0
, t1
);
25478 /* return resulting byte to its original position */
25479 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25480 /* finaly update the destination */
25481 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25486 } else if (unlikely(XRb
== XRc
)) {
25487 /* both operands same -> just set destination to one of them */
25488 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25490 /* the most general case */
25491 TCGv_i32 t0
= tcg_temp_new();
25492 TCGv_i32 t1
= tcg_temp_new();
25495 /* the leftmost bytes (bytes 3) first */
25496 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF000000);
25497 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25498 if (opc
== OPC_MXU_Q8MAX
) {
25499 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25501 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25504 /* bytes 2, 1, 0 */
25505 for (i
= 2; i
>= 0; i
--) {
25506 /* extract corresponding bytes */
25507 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFF << (8 * i
));
25508 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF << (8 * i
));
25509 /* move the bytes to the leftmost position */
25510 tcg_gen_shli_i32(t0
, t0
, 8 * (3 - i
));
25511 tcg_gen_shli_i32(t1
, t1
, 8 * (3 - i
));
25512 /* t0 will be max/min of t0 and t1 */
25513 if (opc
== OPC_MXU_Q8MAX
) {
25514 tcg_gen_smax_i32(t0
, t0
, t1
);
25516 tcg_gen_smin_i32(t0
, t0
, t1
);
25518 /* return resulting byte to its original position */
25519 tcg_gen_shri_i32(t0
, t0
, 8 * (3 - i
));
25520 /* finaly update the destination */
25521 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25531 * MXU instruction category: align
25532 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25538 * S32ALNI XRc, XRb, XRa, optn3
25539 * Arrange bytes from XRb and XRc according to one of five sets of
25540 * rules determined by optn3, and place the result in XRa.
25542 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25543 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25544 * | SPECIAL2 |optn3|0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
25545 * +-----------+-----+---+-----+-------+-------+-------+-----------+
25548 static void gen_mxu_S32ALNI(DisasContext
*ctx
)
25550 uint32_t optn3
, pad
, XRc
, XRb
, XRa
;
25552 optn3
= extract32(ctx
->opcode
, 23, 3);
25553 pad
= extract32(ctx
->opcode
, 21, 2);
25554 XRc
= extract32(ctx
->opcode
, 14, 4);
25555 XRb
= extract32(ctx
->opcode
, 10, 4);
25556 XRa
= extract32(ctx
->opcode
, 6, 4);
25558 if (unlikely(pad
!= 0)) {
25559 /* opcode padding incorrect -> do nothing */
25560 } else if (unlikely(XRa
== 0)) {
25561 /* destination is zero register -> do nothing */
25562 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25563 /* both operands zero registers -> just set destination to all 0s */
25564 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25565 } else if (unlikely(XRb
== 0)) {
25566 /* XRb zero register -> just appropriatelly shift XRc into XRa */
25568 case MXU_OPTN3_PTN0
:
25569 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25571 case MXU_OPTN3_PTN1
:
25572 case MXU_OPTN3_PTN2
:
25573 case MXU_OPTN3_PTN3
:
25574 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1],
25577 case MXU_OPTN3_PTN4
:
25578 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25581 } else if (unlikely(XRc
== 0)) {
25582 /* XRc zero register -> just appropriatelly shift XRb into XRa */
25584 case MXU_OPTN3_PTN0
:
25585 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25587 case MXU_OPTN3_PTN1
:
25588 case MXU_OPTN3_PTN2
:
25589 case MXU_OPTN3_PTN3
:
25590 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25592 case MXU_OPTN3_PTN4
:
25593 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25596 } else if (unlikely(XRb
== XRc
)) {
25597 /* both operands same -> just rotation or moving from any of them */
25599 case MXU_OPTN3_PTN0
:
25600 case MXU_OPTN3_PTN4
:
25601 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25603 case MXU_OPTN3_PTN1
:
25604 case MXU_OPTN3_PTN2
:
25605 case MXU_OPTN3_PTN3
:
25606 tcg_gen_rotli_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25610 /* the most general case */
25612 case MXU_OPTN3_PTN0
:
25616 /* +---------------+ */
25617 /* | A B C D | E F G H */
25618 /* +-------+-------+ */
25623 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25626 case MXU_OPTN3_PTN1
:
25630 /* +-------------------+ */
25631 /* A | B C D E | F G H */
25632 /* +---------+---------+ */
25637 TCGv_i32 t0
= tcg_temp_new();
25638 TCGv_i32 t1
= tcg_temp_new();
25640 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x00FFFFFF);
25641 tcg_gen_shli_i32(t0
, t0
, 8);
25643 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25644 tcg_gen_shri_i32(t1
, t1
, 24);
25646 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25652 case MXU_OPTN3_PTN2
:
25656 /* +-------------------+ */
25657 /* A B | C D E F | G H */
25658 /* +---------+---------+ */
25663 TCGv_i32 t0
= tcg_temp_new();
25664 TCGv_i32 t1
= tcg_temp_new();
25666 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25667 tcg_gen_shli_i32(t0
, t0
, 16);
25669 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25670 tcg_gen_shri_i32(t1
, t1
, 16);
25672 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25678 case MXU_OPTN3_PTN3
:
25682 /* +-------------------+ */
25683 /* A B C | D E F G | H */
25684 /* +---------+---------+ */
25689 TCGv_i32 t0
= tcg_temp_new();
25690 TCGv_i32 t1
= tcg_temp_new();
25692 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x000000FF);
25693 tcg_gen_shli_i32(t0
, t0
, 24);
25695 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFFFF00);
25696 tcg_gen_shri_i32(t1
, t1
, 8);
25698 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25704 case MXU_OPTN3_PTN4
:
25708 /* +---------------+ */
25709 /* A B C D | E F G H | */
25710 /* +-------+-------+ */
25715 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25724 * Decoding engine for MXU
25725 * =======================
25730 * Decode MXU pool00
25732 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25733 * +-----------+---------+-----+-------+-------+-------+-----------+
25734 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL00|
25735 * +-----------+---------+-----+-------+-------+-------+-----------+
25738 static void decode_opc_mxu__pool00(CPUMIPSState
*env
, DisasContext
*ctx
)
25740 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25743 case OPC_MXU_S32MAX
:
25744 case OPC_MXU_S32MIN
:
25745 gen_mxu_S32MAX_S32MIN(ctx
);
25747 case OPC_MXU_D16MAX
:
25748 case OPC_MXU_D16MIN
:
25749 gen_mxu_D16MAX_D16MIN(ctx
);
25751 case OPC_MXU_Q8MAX
:
25752 case OPC_MXU_Q8MIN
:
25753 gen_mxu_Q8MAX_Q8MIN(ctx
);
25755 case OPC_MXU_Q8SLT
:
25756 /* TODO: Implement emulation of Q8SLT instruction. */
25757 MIPS_INVAL("OPC_MXU_Q8SLT");
25758 generate_exception_end(ctx
, EXCP_RI
);
25760 case OPC_MXU_Q8SLTU
:
25761 /* TODO: Implement emulation of Q8SLTU instruction. */
25762 MIPS_INVAL("OPC_MXU_Q8SLTU");
25763 generate_exception_end(ctx
, EXCP_RI
);
25766 MIPS_INVAL("decode_opc_mxu");
25767 generate_exception_end(ctx
, EXCP_RI
);
25774 * Decode MXU pool01
25776 * S32SLT, D16SLT, D16AVG, D16AVGR, Q8AVG, Q8AVGR:
25777 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25778 * +-----------+---------+-----+-------+-------+-------+-----------+
25779 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25780 * +-----------+---------+-----+-------+-------+-------+-----------+
25783 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25784 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25785 * | SPECIAL2 |en2|0 0 0|x x x| XRc | XRb | XRa |MXU__POOL01|
25786 * +-----------+---+-----+-----+-------+-------+-------+-----------+
25789 static void decode_opc_mxu__pool01(CPUMIPSState
*env
, DisasContext
*ctx
)
25791 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25794 case OPC_MXU_S32SLT
:
25795 /* TODO: Implement emulation of S32SLT instruction. */
25796 MIPS_INVAL("OPC_MXU_S32SLT");
25797 generate_exception_end(ctx
, EXCP_RI
);
25799 case OPC_MXU_D16SLT
:
25800 /* TODO: Implement emulation of D16SLT instruction. */
25801 MIPS_INVAL("OPC_MXU_D16SLT");
25802 generate_exception_end(ctx
, EXCP_RI
);
25804 case OPC_MXU_D16AVG
:
25805 /* TODO: Implement emulation of D16AVG instruction. */
25806 MIPS_INVAL("OPC_MXU_D16AVG");
25807 generate_exception_end(ctx
, EXCP_RI
);
25809 case OPC_MXU_D16AVGR
:
25810 /* TODO: Implement emulation of D16AVGR instruction. */
25811 MIPS_INVAL("OPC_MXU_D16AVGR");
25812 generate_exception_end(ctx
, EXCP_RI
);
25814 case OPC_MXU_Q8AVG
:
25815 /* TODO: Implement emulation of Q8AVG instruction. */
25816 MIPS_INVAL("OPC_MXU_Q8AVG");
25817 generate_exception_end(ctx
, EXCP_RI
);
25819 case OPC_MXU_Q8AVGR
:
25820 /* TODO: Implement emulation of Q8AVGR instruction. */
25821 MIPS_INVAL("OPC_MXU_Q8AVGR");
25822 generate_exception_end(ctx
, EXCP_RI
);
25824 case OPC_MXU_Q8ADD
:
25825 /* TODO: Implement emulation of Q8ADD instruction. */
25826 MIPS_INVAL("OPC_MXU_Q8ADD");
25827 generate_exception_end(ctx
, EXCP_RI
);
25830 MIPS_INVAL("decode_opc_mxu");
25831 generate_exception_end(ctx
, EXCP_RI
);
25838 * Decode MXU pool02
25840 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25841 * +-----------+---------+-----+-------+-------+-------+-----------+
25842 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL02|
25843 * +-----------+---------+-----+-------+-------+-------+-----------+
25846 static void decode_opc_mxu__pool02(CPUMIPSState
*env
, DisasContext
*ctx
)
25848 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
25851 case OPC_MXU_S32CPS
:
25852 /* TODO: Implement emulation of S32CPS instruction. */
25853 MIPS_INVAL("OPC_MXU_S32CPS");
25854 generate_exception_end(ctx
, EXCP_RI
);
25856 case OPC_MXU_D16CPS
:
25857 /* TODO: Implement emulation of D16CPS instruction. */
25858 MIPS_INVAL("OPC_MXU_D16CPS");
25859 generate_exception_end(ctx
, EXCP_RI
);
25861 case OPC_MXU_Q8ABD
:
25862 /* TODO: Implement emulation of Q8ABD instruction. */
25863 MIPS_INVAL("OPC_MXU_Q8ABD");
25864 generate_exception_end(ctx
, EXCP_RI
);
25866 case OPC_MXU_Q16SAT
:
25867 /* TODO: Implement emulation of Q16SAT instruction. */
25868 MIPS_INVAL("OPC_MXU_Q16SAT");
25869 generate_exception_end(ctx
, EXCP_RI
);
25872 MIPS_INVAL("decode_opc_mxu");
25873 generate_exception_end(ctx
, EXCP_RI
);
25880 * Decode MXU pool03
25883 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25884 * +-----------+---+---+-------+-------+-------+-------+-----------+
25885 * | SPECIAL2 |x x|on2|0 0 0 0| XRc | XRb | XRa |MXU__POOL03|
25886 * +-----------+---+---+-------+-------+-------+-------+-----------+
25889 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25890 * +-----------+---+---+-------+-------+-------+-------+-----------+
25891 * | SPECIAL2 |x x|on2| Xd | XRc | XRb | XRa |MXU__POOL03|
25892 * +-----------+---+---+-------+-------+-------+-------+-----------+
25895 static void decode_opc_mxu__pool03(CPUMIPSState
*env
, DisasContext
*ctx
)
25897 uint32_t opcode
= extract32(ctx
->opcode
, 24, 2);
25900 case OPC_MXU_D16MULF
:
25901 /* TODO: Implement emulation of D16MULF instruction. */
25902 MIPS_INVAL("OPC_MXU_D16MULF");
25903 generate_exception_end(ctx
, EXCP_RI
);
25905 case OPC_MXU_D16MULE
:
25906 /* TODO: Implement emulation of D16MULE instruction. */
25907 MIPS_INVAL("OPC_MXU_D16MULE");
25908 generate_exception_end(ctx
, EXCP_RI
);
25911 MIPS_INVAL("decode_opc_mxu");
25912 generate_exception_end(ctx
, EXCP_RI
);
25919 * Decode MXU pool04
25921 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25922 * +-----------+---------+-+-------------------+-------+-----------+
25923 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL04|
25924 * +-----------+---------+-+-------------------+-------+-----------+
25927 static void decode_opc_mxu__pool04(CPUMIPSState
*env
, DisasContext
*ctx
)
25929 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25932 case OPC_MXU_S32LDD
:
25933 case OPC_MXU_S32LDDR
:
25934 gen_mxu_s32ldd_s32lddr(ctx
);
25937 MIPS_INVAL("decode_opc_mxu");
25938 generate_exception_end(ctx
, EXCP_RI
);
25945 * Decode MXU pool05
25947 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25948 * +-----------+---------+-+-------------------+-------+-----------+
25949 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL05|
25950 * +-----------+---------+-+-------------------+-------+-----------+
25953 static void decode_opc_mxu__pool05(CPUMIPSState
*env
, DisasContext
*ctx
)
25955 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
25958 case OPC_MXU_S32STD
:
25959 /* TODO: Implement emulation of S32STD instruction. */
25960 MIPS_INVAL("OPC_MXU_S32STD");
25961 generate_exception_end(ctx
, EXCP_RI
);
25963 case OPC_MXU_S32STDR
:
25964 /* TODO: Implement emulation of S32STDR instruction. */
25965 MIPS_INVAL("OPC_MXU_S32STDR");
25966 generate_exception_end(ctx
, EXCP_RI
);
25969 MIPS_INVAL("decode_opc_mxu");
25970 generate_exception_end(ctx
, EXCP_RI
);
25977 * Decode MXU pool06
25979 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
25980 * +-----------+---------+---------+---+-------+-------+-----------+
25981 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL06|
25982 * +-----------+---------+---------+---+-------+-------+-----------+
25985 static void decode_opc_mxu__pool06(CPUMIPSState
*env
, DisasContext
*ctx
)
25987 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
25990 case OPC_MXU_S32LDDV
:
25991 /* TODO: Implement emulation of S32LDDV instruction. */
25992 MIPS_INVAL("OPC_MXU_S32LDDV");
25993 generate_exception_end(ctx
, EXCP_RI
);
25995 case OPC_MXU_S32LDDVR
:
25996 /* TODO: Implement emulation of S32LDDVR instruction. */
25997 MIPS_INVAL("OPC_MXU_S32LDDVR");
25998 generate_exception_end(ctx
, EXCP_RI
);
26001 MIPS_INVAL("decode_opc_mxu");
26002 generate_exception_end(ctx
, EXCP_RI
);
26009 * Decode MXU pool07
26011 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26012 * +-----------+---------+---------+---+-------+-------+-----------+
26013 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL07|
26014 * +-----------+---------+---------+---+-------+-------+-----------+
26017 static void decode_opc_mxu__pool07(CPUMIPSState
*env
, DisasContext
*ctx
)
26019 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
26022 case OPC_MXU_S32STDV
:
26023 /* TODO: Implement emulation of S32TDV instruction. */
26024 MIPS_INVAL("OPC_MXU_S32TDV");
26025 generate_exception_end(ctx
, EXCP_RI
);
26027 case OPC_MXU_S32STDVR
:
26028 /* TODO: Implement emulation of S32TDVR instruction. */
26029 MIPS_INVAL("OPC_MXU_S32TDVR");
26030 generate_exception_end(ctx
, EXCP_RI
);
26033 MIPS_INVAL("decode_opc_mxu");
26034 generate_exception_end(ctx
, EXCP_RI
);
26041 * Decode MXU pool08
26043 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26044 * +-----------+---------+-+-------------------+-------+-----------+
26045 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL08|
26046 * +-----------+---------+-+-------------------+-------+-----------+
26049 static void decode_opc_mxu__pool08(CPUMIPSState
*env
, DisasContext
*ctx
)
26051 uint32_t opcode
= extract32(ctx
->opcode
, 20, 1);
26054 case OPC_MXU_S32LDI
:
26055 /* TODO: Implement emulation of S32LDI instruction. */
26056 MIPS_INVAL("OPC_MXU_S32LDI");
26057 generate_exception_end(ctx
, EXCP_RI
);
26059 case OPC_MXU_S32LDIR
:
26060 /* TODO: Implement emulation of S32LDIR instruction. */
26061 MIPS_INVAL("OPC_MXU_S32LDIR");
26062 generate_exception_end(ctx
, EXCP_RI
);
26065 MIPS_INVAL("decode_opc_mxu");
26066 generate_exception_end(ctx
, EXCP_RI
);
26073 * Decode MXU pool09
26075 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26076 * +-----------+---------+-+-------------------+-------+-----------+
26077 * | SPECIAL2 | rb |x| s12 | XRa |MXU__POOL09|
26078 * +-----------+---------+-+-------------------+-------+-----------+
26081 static void decode_opc_mxu__pool09(CPUMIPSState
*env
, DisasContext
*ctx
)
26083 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
26086 case OPC_MXU_S32SDI
:
26087 /* TODO: Implement emulation of S32SDI instruction. */
26088 MIPS_INVAL("OPC_MXU_S32SDI");
26089 generate_exception_end(ctx
, EXCP_RI
);
26091 case OPC_MXU_S32SDIR
:
26092 /* TODO: Implement emulation of S32SDIR instruction. */
26093 MIPS_INVAL("OPC_MXU_S32SDIR");
26094 generate_exception_end(ctx
, EXCP_RI
);
26097 MIPS_INVAL("decode_opc_mxu");
26098 generate_exception_end(ctx
, EXCP_RI
);
26105 * Decode MXU pool10
26107 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26108 * +-----------+---------+---------+---+-------+-------+-----------+
26109 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL10|
26110 * +-----------+---------+---------+---+-------+-------+-----------+
26113 static void decode_opc_mxu__pool10(CPUMIPSState
*env
, DisasContext
*ctx
)
26115 uint32_t opcode
= extract32(ctx
->opcode
, 5, 0);
26118 case OPC_MXU_S32LDIV
:
26119 /* TODO: Implement emulation of S32LDIV instruction. */
26120 MIPS_INVAL("OPC_MXU_S32LDIV");
26121 generate_exception_end(ctx
, EXCP_RI
);
26123 case OPC_MXU_S32LDIVR
:
26124 /* TODO: Implement emulation of S32LDIVR instruction. */
26125 MIPS_INVAL("OPC_MXU_S32LDIVR");
26126 generate_exception_end(ctx
, EXCP_RI
);
26129 MIPS_INVAL("decode_opc_mxu");
26130 generate_exception_end(ctx
, EXCP_RI
);
26137 * Decode MXU pool11
26139 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26140 * +-----------+---------+---------+---+-------+-------+-----------+
26141 * | SPECIAL2 | rb | rc |st2|x x x x| XRa |MXU__POOL11|
26142 * +-----------+---------+---------+---+-------+-------+-----------+
26145 static void decode_opc_mxu__pool11(CPUMIPSState
*env
, DisasContext
*ctx
)
26147 uint32_t opcode
= extract32(ctx
->opcode
, 10, 4);
26150 case OPC_MXU_S32SDIV
:
26151 /* TODO: Implement emulation of S32SDIV instruction. */
26152 MIPS_INVAL("OPC_MXU_S32SDIV");
26153 generate_exception_end(ctx
, EXCP_RI
);
26155 case OPC_MXU_S32SDIVR
:
26156 /* TODO: Implement emulation of S32SDIVR instruction. */
26157 MIPS_INVAL("OPC_MXU_S32SDIVR");
26158 generate_exception_end(ctx
, EXCP_RI
);
26161 MIPS_INVAL("decode_opc_mxu");
26162 generate_exception_end(ctx
, EXCP_RI
);
26169 * Decode MXU pool12
26171 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26172 * +-----------+---+---+-------+-------+-------+-------+-----------+
26173 * | SPECIAL2 |an2|x x| Xd | XRc | XRb | XRa |MXU__POOL12|
26174 * +-----------+---+---+-------+-------+-------+-------+-----------+
26177 static void decode_opc_mxu__pool12(CPUMIPSState
*env
, DisasContext
*ctx
)
26179 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26182 case OPC_MXU_D32ACC
:
26183 /* TODO: Implement emulation of D32ACC instruction. */
26184 MIPS_INVAL("OPC_MXU_D32ACC");
26185 generate_exception_end(ctx
, EXCP_RI
);
26187 case OPC_MXU_D32ACCM
:
26188 /* TODO: Implement emulation of D32ACCM instruction. */
26189 MIPS_INVAL("OPC_MXU_D32ACCM");
26190 generate_exception_end(ctx
, EXCP_RI
);
26192 case OPC_MXU_D32ASUM
:
26193 /* TODO: Implement emulation of D32ASUM instruction. */
26194 MIPS_INVAL("OPC_MXU_D32ASUM");
26195 generate_exception_end(ctx
, EXCP_RI
);
26198 MIPS_INVAL("decode_opc_mxu");
26199 generate_exception_end(ctx
, EXCP_RI
);
26206 * Decode MXU pool13
26208 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26209 * +-----------+---+---+-------+-------+-------+-------+-----------+
26210 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL13|
26211 * +-----------+---+---+-------+-------+-------+-------+-----------+
26214 static void decode_opc_mxu__pool13(CPUMIPSState
*env
, DisasContext
*ctx
)
26216 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26219 case OPC_MXU_Q16ACC
:
26220 /* TODO: Implement emulation of Q16ACC instruction. */
26221 MIPS_INVAL("OPC_MXU_Q16ACC");
26222 generate_exception_end(ctx
, EXCP_RI
);
26224 case OPC_MXU_Q16ACCM
:
26225 /* TODO: Implement emulation of Q16ACCM instruction. */
26226 MIPS_INVAL("OPC_MXU_Q16ACCM");
26227 generate_exception_end(ctx
, EXCP_RI
);
26229 case OPC_MXU_Q16ASUM
:
26230 /* TODO: Implement emulation of Q16ASUM instruction. */
26231 MIPS_INVAL("OPC_MXU_Q16ASUM");
26232 generate_exception_end(ctx
, EXCP_RI
);
26235 MIPS_INVAL("decode_opc_mxu");
26236 generate_exception_end(ctx
, EXCP_RI
);
26243 * Decode MXU pool14
26246 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26247 * +-----------+---+---+-------+-------+-------+-------+-----------+
26248 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL14|
26249 * +-----------+---+---+-------+-------+-------+-------+-----------+
26252 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26253 * +-----------+---+---+-------+-------+-------+-------+-----------+
26254 * | SPECIAL2 |en2|x x|0 0 0 0| XRc | XRb | XRa |MXU__POOL14|
26255 * +-----------+---+---+-------+-------+-------+-------+-----------+
26258 static void decode_opc_mxu__pool14(CPUMIPSState
*env
, DisasContext
*ctx
)
26260 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26263 case OPC_MXU_Q8ADDE
:
26264 /* TODO: Implement emulation of Q8ADDE instruction. */
26265 MIPS_INVAL("OPC_MXU_Q8ADDE");
26266 generate_exception_end(ctx
, EXCP_RI
);
26268 case OPC_MXU_D8SUM
:
26269 /* TODO: Implement emulation of D8SUM instruction. */
26270 MIPS_INVAL("OPC_MXU_D8SUM");
26271 generate_exception_end(ctx
, EXCP_RI
);
26273 case OPC_MXU_D8SUMC
:
26274 /* TODO: Implement emulation of D8SUMC instruction. */
26275 MIPS_INVAL("OPC_MXU_D8SUMC");
26276 generate_exception_end(ctx
, EXCP_RI
);
26279 MIPS_INVAL("decode_opc_mxu");
26280 generate_exception_end(ctx
, EXCP_RI
);
26287 * Decode MXU pool15
26289 * S32MUL, S32MULU, S32EXTRV:
26290 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26291 * +-----------+---------+---------+---+-------+-------+-----------+
26292 * | SPECIAL2 | rs | rt |x x| XRd | XRa |MXU__POOL15|
26293 * +-----------+---------+---------+---+-------+-------+-----------+
26296 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26297 * +-----------+---------+---------+---+-------+-------+-----------+
26298 * | SPECIAL2 | rb | sft5 |x x| XRd | XRa |MXU__POOL15|
26299 * +-----------+---------+---------+---+-------+-------+-----------+
26302 static void decode_opc_mxu__pool15(CPUMIPSState
*env
, DisasContext
*ctx
)
26304 uint32_t opcode
= extract32(ctx
->opcode
, 14, 2);
26307 case OPC_MXU_S32MUL
:
26308 /* TODO: Implement emulation of S32MUL instruction. */
26309 MIPS_INVAL("OPC_MXU_S32MUL");
26310 generate_exception_end(ctx
, EXCP_RI
);
26312 case OPC_MXU_S32MULU
:
26313 /* TODO: Implement emulation of S32MULU instruction. */
26314 MIPS_INVAL("OPC_MXU_S32MULU");
26315 generate_exception_end(ctx
, EXCP_RI
);
26317 case OPC_MXU_S32EXTR
:
26318 /* TODO: Implement emulation of S32EXTR instruction. */
26319 MIPS_INVAL("OPC_MXU_S32EXTR");
26320 generate_exception_end(ctx
, EXCP_RI
);
26322 case OPC_MXU_S32EXTRV
:
26323 /* TODO: Implement emulation of S32EXTRV instruction. */
26324 MIPS_INVAL("OPC_MXU_S32EXTRV");
26325 generate_exception_end(ctx
, EXCP_RI
);
26328 MIPS_INVAL("decode_opc_mxu");
26329 generate_exception_end(ctx
, EXCP_RI
);
26336 * Decode MXU pool16
26339 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26340 * +-----------+---------+-----+-------+-------+-------+-----------+
26341 * | SPECIAL2 | rb |x x x| XRc | XRb | XRa |MXU__POOL16|
26342 * +-----------+---------+-----+-------+-------+-------+-----------+
26345 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26346 * +-----------+---------+-----+-------+-------+-------+-----------+
26347 * | SPECIAL2 | rs |x x x| XRc | XRb | XRa |MXU__POOL16|
26348 * +-----------+---------+-----+-------+-------+-------+-----------+
26351 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26352 * +-----------+-----+---+-----+-------+-------+-------+-----------+
26353 * | SPECIAL2 | s3 |0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
26354 * +-----------+-----+---+-----+-------+-------+-------+-----------+
26357 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26358 * +-----------+-----+---+-----+-------+---------------+-----------+
26359 * | SPECIAL2 |optn3|0 0|x x x| XRc | s8 |MXU__POOL16|
26360 * +-----------+-----+---+-----+-------+---------------+-----------+
26362 * S32NOR, S32AND, S32OR, S32XOR:
26363 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26364 * +-----------+---------+-----+-------+-------+-------+-----------+
26365 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL16|
26366 * +-----------+---------+-----+-------+-------+-------+-----------+
26369 static void decode_opc_mxu__pool16(CPUMIPSState
*env
, DisasContext
*ctx
)
26371 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26374 case OPC_MXU_D32SARW
:
26375 /* TODO: Implement emulation of D32SARW instruction. */
26376 MIPS_INVAL("OPC_MXU_D32SARW");
26377 generate_exception_end(ctx
, EXCP_RI
);
26379 case OPC_MXU_S32ALN
:
26380 /* TODO: Implement emulation of S32ALN instruction. */
26381 MIPS_INVAL("OPC_MXU_S32ALN");
26382 generate_exception_end(ctx
, EXCP_RI
);
26384 case OPC_MXU_S32ALNI
:
26385 gen_mxu_S32ALNI(ctx
);
26387 case OPC_MXU_S32LUI
:
26388 /* TODO: Implement emulation of S32LUI instruction. */
26389 MIPS_INVAL("OPC_MXU_S32LUI");
26390 generate_exception_end(ctx
, EXCP_RI
);
26392 case OPC_MXU_S32NOR
:
26393 gen_mxu_S32NOR(ctx
);
26395 case OPC_MXU_S32AND
:
26396 gen_mxu_S32AND(ctx
);
26398 case OPC_MXU_S32OR
:
26399 gen_mxu_S32OR(ctx
);
26401 case OPC_MXU_S32XOR
:
26402 gen_mxu_S32XOR(ctx
);
26405 MIPS_INVAL("decode_opc_mxu");
26406 generate_exception_end(ctx
, EXCP_RI
);
26413 * Decode MXU pool17
26415 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26416 * +-----------+---------+---------+---+---------+-----+-----------+
26417 * | SPECIAL2 | rs | rt |0 0| rd |x x x|MXU__POOL17|
26418 * +-----------+---------+---------+---+---------+-----+-----------+
26421 static void decode_opc_mxu__pool17(CPUMIPSState
*env
, DisasContext
*ctx
)
26423 uint32_t opcode
= extract32(ctx
->opcode
, 6, 2);
26427 /* TODO: Implement emulation of LXW instruction. */
26428 MIPS_INVAL("OPC_MXU_LXW");
26429 generate_exception_end(ctx
, EXCP_RI
);
26432 /* TODO: Implement emulation of LXH instruction. */
26433 MIPS_INVAL("OPC_MXU_LXH");
26434 generate_exception_end(ctx
, EXCP_RI
);
26437 /* TODO: Implement emulation of LXHU instruction. */
26438 MIPS_INVAL("OPC_MXU_LXHU");
26439 generate_exception_end(ctx
, EXCP_RI
);
26442 /* TODO: Implement emulation of LXB instruction. */
26443 MIPS_INVAL("OPC_MXU_LXB");
26444 generate_exception_end(ctx
, EXCP_RI
);
26447 /* TODO: Implement emulation of LXBU instruction. */
26448 MIPS_INVAL("OPC_MXU_LXBU");
26449 generate_exception_end(ctx
, EXCP_RI
);
26452 MIPS_INVAL("decode_opc_mxu");
26453 generate_exception_end(ctx
, EXCP_RI
);
26459 * Decode MXU pool18
26461 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26462 * +-----------+---------+-----+-------+-------+-------+-----------+
26463 * | SPECIAL2 | rb |x x x| XRd | XRa |0 0 0 0|MXU__POOL18|
26464 * +-----------+---------+-----+-------+-------+-------+-----------+
26467 static void decode_opc_mxu__pool18(CPUMIPSState
*env
, DisasContext
*ctx
)
26469 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26472 case OPC_MXU_D32SLLV
:
26473 /* TODO: Implement emulation of D32SLLV instruction. */
26474 MIPS_INVAL("OPC_MXU_D32SLLV");
26475 generate_exception_end(ctx
, EXCP_RI
);
26477 case OPC_MXU_D32SLRV
:
26478 /* TODO: Implement emulation of D32SLRV instruction. */
26479 MIPS_INVAL("OPC_MXU_D32SLRV");
26480 generate_exception_end(ctx
, EXCP_RI
);
26482 case OPC_MXU_D32SARV
:
26483 /* TODO: Implement emulation of D32SARV instruction. */
26484 MIPS_INVAL("OPC_MXU_D32SARV");
26485 generate_exception_end(ctx
, EXCP_RI
);
26487 case OPC_MXU_Q16SLLV
:
26488 /* TODO: Implement emulation of Q16SLLV instruction. */
26489 MIPS_INVAL("OPC_MXU_Q16SLLV");
26490 generate_exception_end(ctx
, EXCP_RI
);
26492 case OPC_MXU_Q16SLRV
:
26493 /* TODO: Implement emulation of Q16SLRV instruction. */
26494 MIPS_INVAL("OPC_MXU_Q16SLRV");
26495 generate_exception_end(ctx
, EXCP_RI
);
26497 case OPC_MXU_Q16SARV
:
26498 /* TODO: Implement emulation of Q16SARV instruction. */
26499 MIPS_INVAL("OPC_MXU_Q16SARV");
26500 generate_exception_end(ctx
, EXCP_RI
);
26503 MIPS_INVAL("decode_opc_mxu");
26504 generate_exception_end(ctx
, EXCP_RI
);
26511 * Decode MXU pool19
26513 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26514 * +-----------+---+---+-------+-------+-------+-------+-----------+
26515 * | SPECIAL2 |0 0|x x| XRd | XRc | XRb | XRa |MXU__POOL19|
26516 * +-----------+---+---+-------+-------+-------+-------+-----------+
26519 static void decode_opc_mxu__pool19(CPUMIPSState
*env
, DisasContext
*ctx
)
26521 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26524 case OPC_MXU_Q8MUL
:
26525 case OPC_MXU_Q8MULSU
:
26526 gen_mxu_q8mul_q8mulsu(ctx
);
26529 MIPS_INVAL("decode_opc_mxu");
26530 generate_exception_end(ctx
, EXCP_RI
);
26537 * Decode MXU pool20
26539 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26540 * +-----------+---------+-----+-------+-------+-------+-----------+
26541 * | SPECIAL2 |0 0 0 0 0|x x x| XRc | XRb | XRa |MXU__POOL20|
26542 * +-----------+---------+-----+-------+-------+-------+-----------+
26545 static void decode_opc_mxu__pool20(CPUMIPSState
*env
, DisasContext
*ctx
)
26547 uint32_t opcode
= extract32(ctx
->opcode
, 18, 3);
26550 case OPC_MXU_Q8MOVZ
:
26551 /* TODO: Implement emulation of Q8MOVZ instruction. */
26552 MIPS_INVAL("OPC_MXU_Q8MOVZ");
26553 generate_exception_end(ctx
, EXCP_RI
);
26555 case OPC_MXU_Q8MOVN
:
26556 /* TODO: Implement emulation of Q8MOVN instruction. */
26557 MIPS_INVAL("OPC_MXU_Q8MOVN");
26558 generate_exception_end(ctx
, EXCP_RI
);
26560 case OPC_MXU_D16MOVZ
:
26561 /* TODO: Implement emulation of D16MOVZ instruction. */
26562 MIPS_INVAL("OPC_MXU_D16MOVZ");
26563 generate_exception_end(ctx
, EXCP_RI
);
26565 case OPC_MXU_D16MOVN
:
26566 /* TODO: Implement emulation of D16MOVN instruction. */
26567 MIPS_INVAL("OPC_MXU_D16MOVN");
26568 generate_exception_end(ctx
, EXCP_RI
);
26570 case OPC_MXU_S32MOVZ
:
26571 /* TODO: Implement emulation of S32MOVZ instruction. */
26572 MIPS_INVAL("OPC_MXU_S32MOVZ");
26573 generate_exception_end(ctx
, EXCP_RI
);
26575 case OPC_MXU_S32MOVN
:
26576 /* TODO: Implement emulation of S32MOVN instruction. */
26577 MIPS_INVAL("OPC_MXU_S32MOVN");
26578 generate_exception_end(ctx
, EXCP_RI
);
26581 MIPS_INVAL("decode_opc_mxu");
26582 generate_exception_end(ctx
, EXCP_RI
);
26589 * Decode MXU pool21
26591 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26592 * +-----------+---+---+-------+-------+-------+-------+-----------+
26593 * | SPECIAL2 |an2|x x| XRd | XRc | XRb | XRa |MXU__POOL21|
26594 * +-----------+---+---+-------+-------+-------+-------+-----------+
26597 static void decode_opc_mxu__pool21(CPUMIPSState
*env
, DisasContext
*ctx
)
26599 uint32_t opcode
= extract32(ctx
->opcode
, 22, 2);
26602 case OPC_MXU_Q8MAC
:
26603 /* TODO: Implement emulation of Q8MAC instruction. */
26604 MIPS_INVAL("OPC_MXU_Q8MAC");
26605 generate_exception_end(ctx
, EXCP_RI
);
26607 case OPC_MXU_Q8MACSU
:
26608 /* TODO: Implement emulation of Q8MACSU instruction. */
26609 MIPS_INVAL("OPC_MXU_Q8MACSU");
26610 generate_exception_end(ctx
, EXCP_RI
);
26613 MIPS_INVAL("decode_opc_mxu");
26614 generate_exception_end(ctx
, EXCP_RI
);
26621 * Main MXU decoding function
26623 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
26624 * +-----------+---------------------------------------+-----------+
26625 * | SPECIAL2 | |x x x x x x|
26626 * +-----------+---------------------------------------+-----------+
26629 static void decode_opc_mxu(CPUMIPSState
*env
, DisasContext
*ctx
)
26632 * TODO: Investigate necessity of including handling of
26633 * CLZ, CLO, SDBB in this function, as they belong to
26634 * SPECIAL2 opcode space for regular pre-R6 MIPS ISAs.
26636 uint32_t opcode
= extract32(ctx
->opcode
, 0, 6);
26638 if (opcode
== OPC__MXU_MUL
) {
26639 uint32_t rs
, rt
, rd
, op1
;
26641 rs
= extract32(ctx
->opcode
, 21, 5);
26642 rt
= extract32(ctx
->opcode
, 16, 5);
26643 rd
= extract32(ctx
->opcode
, 11, 5);
26644 op1
= MASK_SPECIAL2(ctx
->opcode
);
26646 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26651 if (opcode
== OPC_MXU_S32M2I
) {
26652 gen_mxu_s32m2i(ctx
);
26656 if (opcode
== OPC_MXU_S32I2M
) {
26657 gen_mxu_s32i2m(ctx
);
26662 TCGv t_mxu_cr
= tcg_temp_new();
26663 TCGLabel
*l_exit
= gen_new_label();
26665 gen_load_mxu_cr(t_mxu_cr
);
26666 tcg_gen_andi_tl(t_mxu_cr
, t_mxu_cr
, MXU_CR_MXU_EN
);
26667 tcg_gen_brcondi_tl(TCG_COND_NE
, t_mxu_cr
, MXU_CR_MXU_EN
, l_exit
);
26670 case OPC_MXU_S32MADD
:
26671 /* TODO: Implement emulation of S32MADD instruction. */
26672 MIPS_INVAL("OPC_MXU_S32MADD");
26673 generate_exception_end(ctx
, EXCP_RI
);
26675 case OPC_MXU_S32MADDU
:
26676 /* TODO: Implement emulation of S32MADDU instruction. */
26677 MIPS_INVAL("OPC_MXU_S32MADDU");
26678 generate_exception_end(ctx
, EXCP_RI
);
26680 case OPC_MXU__POOL00
:
26681 decode_opc_mxu__pool00(env
, ctx
);
26683 case OPC_MXU_S32MSUB
:
26684 /* TODO: Implement emulation of S32MSUB instruction. */
26685 MIPS_INVAL("OPC_MXU_S32MSUB");
26686 generate_exception_end(ctx
, EXCP_RI
);
26688 case OPC_MXU_S32MSUBU
:
26689 /* TODO: Implement emulation of S32MSUBU instruction. */
26690 MIPS_INVAL("OPC_MXU_S32MSUBU");
26691 generate_exception_end(ctx
, EXCP_RI
);
26693 case OPC_MXU__POOL01
:
26694 decode_opc_mxu__pool01(env
, ctx
);
26696 case OPC_MXU__POOL02
:
26697 decode_opc_mxu__pool02(env
, ctx
);
26699 case OPC_MXU_D16MUL
:
26700 gen_mxu_d16mul(ctx
);
26702 case OPC_MXU__POOL03
:
26703 decode_opc_mxu__pool03(env
, ctx
);
26705 case OPC_MXU_D16MAC
:
26706 gen_mxu_d16mac(ctx
);
26708 case OPC_MXU_D16MACF
:
26709 /* TODO: Implement emulation of D16MACF instruction. */
26710 MIPS_INVAL("OPC_MXU_D16MACF");
26711 generate_exception_end(ctx
, EXCP_RI
);
26713 case OPC_MXU_D16MADL
:
26714 /* TODO: Implement emulation of D16MADL instruction. */
26715 MIPS_INVAL("OPC_MXU_D16MADL");
26716 generate_exception_end(ctx
, EXCP_RI
);
26718 case OPC_MXU_S16MAD
:
26719 /* TODO: Implement emulation of S16MAD instruction. */
26720 MIPS_INVAL("OPC_MXU_S16MAD");
26721 generate_exception_end(ctx
, EXCP_RI
);
26723 case OPC_MXU_Q16ADD
:
26724 /* TODO: Implement emulation of Q16ADD instruction. */
26725 MIPS_INVAL("OPC_MXU_Q16ADD");
26726 generate_exception_end(ctx
, EXCP_RI
);
26728 case OPC_MXU_D16MACE
:
26729 /* TODO: Implement emulation of D16MACE instruction. */
26730 MIPS_INVAL("OPC_MXU_D16MACE");
26731 generate_exception_end(ctx
, EXCP_RI
);
26733 case OPC_MXU__POOL04
:
26734 decode_opc_mxu__pool04(env
, ctx
);
26736 case OPC_MXU__POOL05
:
26737 decode_opc_mxu__pool05(env
, ctx
);
26739 case OPC_MXU__POOL06
:
26740 decode_opc_mxu__pool06(env
, ctx
);
26742 case OPC_MXU__POOL07
:
26743 decode_opc_mxu__pool07(env
, ctx
);
26745 case OPC_MXU__POOL08
:
26746 decode_opc_mxu__pool08(env
, ctx
);
26748 case OPC_MXU__POOL09
:
26749 decode_opc_mxu__pool09(env
, ctx
);
26751 case OPC_MXU__POOL10
:
26752 decode_opc_mxu__pool10(env
, ctx
);
26754 case OPC_MXU__POOL11
:
26755 decode_opc_mxu__pool11(env
, ctx
);
26757 case OPC_MXU_D32ADD
:
26758 /* TODO: Implement emulation of D32ADD instruction. */
26759 MIPS_INVAL("OPC_MXU_D32ADD");
26760 generate_exception_end(ctx
, EXCP_RI
);
26762 case OPC_MXU__POOL12
:
26763 decode_opc_mxu__pool12(env
, ctx
);
26765 case OPC_MXU__POOL13
:
26766 decode_opc_mxu__pool13(env
, ctx
);
26768 case OPC_MXU__POOL14
:
26769 decode_opc_mxu__pool14(env
, ctx
);
26771 case OPC_MXU_Q8ACCE
:
26772 /* TODO: Implement emulation of Q8ACCE instruction. */
26773 MIPS_INVAL("OPC_MXU_Q8ACCE");
26774 generate_exception_end(ctx
, EXCP_RI
);
26776 case OPC_MXU_S8LDD
:
26777 gen_mxu_s8ldd(ctx
);
26779 case OPC_MXU_S8STD
:
26780 /* TODO: Implement emulation of S8STD instruction. */
26781 MIPS_INVAL("OPC_MXU_S8STD");
26782 generate_exception_end(ctx
, EXCP_RI
);
26784 case OPC_MXU_S8LDI
:
26785 /* TODO: Implement emulation of S8LDI instruction. */
26786 MIPS_INVAL("OPC_MXU_S8LDI");
26787 generate_exception_end(ctx
, EXCP_RI
);
26789 case OPC_MXU_S8SDI
:
26790 /* TODO: Implement emulation of S8SDI instruction. */
26791 MIPS_INVAL("OPC_MXU_S8SDI");
26792 generate_exception_end(ctx
, EXCP_RI
);
26794 case OPC_MXU__POOL15
:
26795 decode_opc_mxu__pool15(env
, ctx
);
26797 case OPC_MXU__POOL16
:
26798 decode_opc_mxu__pool16(env
, ctx
);
26800 case OPC_MXU__POOL17
:
26801 decode_opc_mxu__pool17(env
, ctx
);
26803 case OPC_MXU_S16LDD
:
26804 /* TODO: Implement emulation of S16LDD instruction. */
26805 MIPS_INVAL("OPC_MXU_S16LDD");
26806 generate_exception_end(ctx
, EXCP_RI
);
26808 case OPC_MXU_S16STD
:
26809 /* TODO: Implement emulation of S16STD instruction. */
26810 MIPS_INVAL("OPC_MXU_S16STD");
26811 generate_exception_end(ctx
, EXCP_RI
);
26813 case OPC_MXU_S16LDI
:
26814 /* TODO: Implement emulation of S16LDI instruction. */
26815 MIPS_INVAL("OPC_MXU_S16LDI");
26816 generate_exception_end(ctx
, EXCP_RI
);
26818 case OPC_MXU_S16SDI
:
26819 /* TODO: Implement emulation of S16SDI instruction. */
26820 MIPS_INVAL("OPC_MXU_S16SDI");
26821 generate_exception_end(ctx
, EXCP_RI
);
26823 case OPC_MXU_D32SLL
:
26824 /* TODO: Implement emulation of D32SLL instruction. */
26825 MIPS_INVAL("OPC_MXU_D32SLL");
26826 generate_exception_end(ctx
, EXCP_RI
);
26828 case OPC_MXU_D32SLR
:
26829 /* TODO: Implement emulation of D32SLR instruction. */
26830 MIPS_INVAL("OPC_MXU_D32SLR");
26831 generate_exception_end(ctx
, EXCP_RI
);
26833 case OPC_MXU_D32SARL
:
26834 /* TODO: Implement emulation of D32SARL instruction. */
26835 MIPS_INVAL("OPC_MXU_D32SARL");
26836 generate_exception_end(ctx
, EXCP_RI
);
26838 case OPC_MXU_D32SAR
:
26839 /* TODO: Implement emulation of D32SAR instruction. */
26840 MIPS_INVAL("OPC_MXU_D32SAR");
26841 generate_exception_end(ctx
, EXCP_RI
);
26843 case OPC_MXU_Q16SLL
:
26844 /* TODO: Implement emulation of Q16SLL instruction. */
26845 MIPS_INVAL("OPC_MXU_Q16SLL");
26846 generate_exception_end(ctx
, EXCP_RI
);
26848 case OPC_MXU_Q16SLR
:
26849 /* TODO: Implement emulation of Q16SLR instruction. */
26850 MIPS_INVAL("OPC_MXU_Q16SLR");
26851 generate_exception_end(ctx
, EXCP_RI
);
26853 case OPC_MXU__POOL18
:
26854 decode_opc_mxu__pool18(env
, ctx
);
26856 case OPC_MXU_Q16SAR
:
26857 /* TODO: Implement emulation of Q16SAR instruction. */
26858 MIPS_INVAL("OPC_MXU_Q16SAR");
26859 generate_exception_end(ctx
, EXCP_RI
);
26861 case OPC_MXU__POOL19
:
26862 decode_opc_mxu__pool19(env
, ctx
);
26864 case OPC_MXU__POOL20
:
26865 decode_opc_mxu__pool20(env
, ctx
);
26867 case OPC_MXU__POOL21
:
26868 decode_opc_mxu__pool21(env
, ctx
);
26870 case OPC_MXU_Q16SCOP
:
26871 /* TODO: Implement emulation of Q16SCOP instruction. */
26872 MIPS_INVAL("OPC_MXU_Q16SCOP");
26873 generate_exception_end(ctx
, EXCP_RI
);
26875 case OPC_MXU_Q8MADL
:
26876 /* TODO: Implement emulation of Q8MADL instruction. */
26877 MIPS_INVAL("OPC_MXU_Q8MADL");
26878 generate_exception_end(ctx
, EXCP_RI
);
26880 case OPC_MXU_S32SFL
:
26881 /* TODO: Implement emulation of S32SFL instruction. */
26882 MIPS_INVAL("OPC_MXU_S32SFL");
26883 generate_exception_end(ctx
, EXCP_RI
);
26885 case OPC_MXU_Q8SAD
:
26886 /* TODO: Implement emulation of Q8SAD instruction. */
26887 MIPS_INVAL("OPC_MXU_Q8SAD");
26888 generate_exception_end(ctx
, EXCP_RI
);
26891 MIPS_INVAL("decode_opc_mxu");
26892 generate_exception_end(ctx
, EXCP_RI
);
26895 gen_set_label(l_exit
);
26896 tcg_temp_free(t_mxu_cr
);
26900 #endif /* !defined(TARGET_MIPS64) */
26903 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26908 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
26910 rs
= (ctx
->opcode
>> 21) & 0x1f;
26911 rt
= (ctx
->opcode
>> 16) & 0x1f;
26912 rd
= (ctx
->opcode
>> 11) & 0x1f;
26914 op1
= MASK_SPECIAL2(ctx
->opcode
);
26916 case OPC_MADD
: /* Multiply and add/sub */
26920 check_insn(ctx
, ISA_MIPS32
);
26921 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
26924 gen_arith(ctx
, op1
, rd
, rs
, rt
);
26927 case OPC_DIVU_G_2F
:
26928 case OPC_MULT_G_2F
:
26929 case OPC_MULTU_G_2F
:
26931 case OPC_MODU_G_2F
:
26932 check_insn(ctx
, INSN_LOONGSON2F
);
26933 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26937 check_insn(ctx
, ISA_MIPS32
);
26938 gen_cl(ctx
, op1
, rd
, rs
);
26941 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
26942 gen_helper_do_semihosting(cpu_env
);
26945 * XXX: not clear which exception should be raised
26946 * when in debug mode...
26948 check_insn(ctx
, ISA_MIPS32
);
26949 generate_exception_end(ctx
, EXCP_DBp
);
26952 #if defined(TARGET_MIPS64)
26955 check_insn(ctx
, ISA_MIPS64
);
26956 check_mips_64(ctx
);
26957 gen_cl(ctx
, op1
, rd
, rs
);
26959 case OPC_DMULT_G_2F
:
26960 case OPC_DMULTU_G_2F
:
26961 case OPC_DDIV_G_2F
:
26962 case OPC_DDIVU_G_2F
:
26963 case OPC_DMOD_G_2F
:
26964 case OPC_DMODU_G_2F
:
26965 check_insn(ctx
, INSN_LOONGSON2F
);
26966 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26969 default: /* Invalid */
26970 MIPS_INVAL("special2_legacy");
26971 generate_exception_end(ctx
, EXCP_RI
);
26976 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
26978 int rs
, rt
, rd
, sa
;
26982 rs
= (ctx
->opcode
>> 21) & 0x1f;
26983 rt
= (ctx
->opcode
>> 16) & 0x1f;
26984 rd
= (ctx
->opcode
>> 11) & 0x1f;
26985 sa
= (ctx
->opcode
>> 6) & 0x1f;
26986 imm
= (int16_t)ctx
->opcode
>> 7;
26988 op1
= MASK_SPECIAL3(ctx
->opcode
);
26992 /* hint codes 24-31 are reserved and signal RI */
26993 generate_exception_end(ctx
, EXCP_RI
);
26995 /* Treat as NOP. */
26998 check_cp0_enabled(ctx
);
26999 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27000 gen_cache_operation(ctx
, rt
, rs
, imm
);
27004 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
27007 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27012 /* Treat as NOP. */
27015 op2
= MASK_BSHFL(ctx
->opcode
);
27021 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
27024 gen_bitswap(ctx
, op2
, rd
, rt
);
27029 #if defined(TARGET_MIPS64)
27031 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEQ
, false);
27034 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27037 check_mips_64(ctx
);
27040 /* Treat as NOP. */
27043 op2
= MASK_DBSHFL(ctx
->opcode
);
27053 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
27056 gen_bitswap(ctx
, op2
, rd
, rt
);
27063 default: /* Invalid */
27064 MIPS_INVAL("special3_r6");
27065 generate_exception_end(ctx
, EXCP_RI
);
27070 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
27075 rs
= (ctx
->opcode
>> 21) & 0x1f;
27076 rt
= (ctx
->opcode
>> 16) & 0x1f;
27077 rd
= (ctx
->opcode
>> 11) & 0x1f;
27079 op1
= MASK_SPECIAL3(ctx
->opcode
);
27082 case OPC_DIVU_G_2E
:
27084 case OPC_MODU_G_2E
:
27085 case OPC_MULT_G_2E
:
27086 case OPC_MULTU_G_2E
:
27088 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
27089 * the same mask and op1.
27091 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
27092 op2
= MASK_ADDUH_QB(ctx
->opcode
);
27095 case OPC_ADDUH_R_QB
:
27097 case OPC_ADDQH_R_PH
:
27099 case OPC_ADDQH_R_W
:
27101 case OPC_SUBUH_R_QB
:
27103 case OPC_SUBQH_R_PH
:
27105 case OPC_SUBQH_R_W
:
27106 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27111 case OPC_MULQ_RS_W
:
27112 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27115 MIPS_INVAL("MASK ADDUH.QB");
27116 generate_exception_end(ctx
, EXCP_RI
);
27119 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
27120 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27122 generate_exception_end(ctx
, EXCP_RI
);
27126 op2
= MASK_LX(ctx
->opcode
);
27128 #if defined(TARGET_MIPS64)
27134 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
27136 default: /* Invalid */
27137 MIPS_INVAL("MASK LX");
27138 generate_exception_end(ctx
, EXCP_RI
);
27142 case OPC_ABSQ_S_PH_DSP
:
27143 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
27145 case OPC_ABSQ_S_QB
:
27146 case OPC_ABSQ_S_PH
:
27148 case OPC_PRECEQ_W_PHL
:
27149 case OPC_PRECEQ_W_PHR
:
27150 case OPC_PRECEQU_PH_QBL
:
27151 case OPC_PRECEQU_PH_QBR
:
27152 case OPC_PRECEQU_PH_QBLA
:
27153 case OPC_PRECEQU_PH_QBRA
:
27154 case OPC_PRECEU_PH_QBL
:
27155 case OPC_PRECEU_PH_QBR
:
27156 case OPC_PRECEU_PH_QBLA
:
27157 case OPC_PRECEU_PH_QBRA
:
27158 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27165 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27168 MIPS_INVAL("MASK ABSQ_S.PH");
27169 generate_exception_end(ctx
, EXCP_RI
);
27173 case OPC_ADDU_QB_DSP
:
27174 op2
= MASK_ADDU_QB(ctx
->opcode
);
27177 case OPC_ADDQ_S_PH
:
27180 case OPC_ADDU_S_QB
:
27182 case OPC_ADDU_S_PH
:
27184 case OPC_SUBQ_S_PH
:
27187 case OPC_SUBU_S_QB
:
27189 case OPC_SUBU_S_PH
:
27193 case OPC_RADDU_W_QB
:
27194 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27196 case OPC_MULEU_S_PH_QBL
:
27197 case OPC_MULEU_S_PH_QBR
:
27198 case OPC_MULQ_RS_PH
:
27199 case OPC_MULEQ_S_W_PHL
:
27200 case OPC_MULEQ_S_W_PHR
:
27201 case OPC_MULQ_S_PH
:
27202 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27204 default: /* Invalid */
27205 MIPS_INVAL("MASK ADDU.QB");
27206 generate_exception_end(ctx
, EXCP_RI
);
27211 case OPC_CMPU_EQ_QB_DSP
:
27212 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
27214 case OPC_PRECR_SRA_PH_W
:
27215 case OPC_PRECR_SRA_R_PH_W
:
27216 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27218 case OPC_PRECR_QB_PH
:
27219 case OPC_PRECRQ_QB_PH
:
27220 case OPC_PRECRQ_PH_W
:
27221 case OPC_PRECRQ_RS_PH_W
:
27222 case OPC_PRECRQU_S_QB_PH
:
27223 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27225 case OPC_CMPU_EQ_QB
:
27226 case OPC_CMPU_LT_QB
:
27227 case OPC_CMPU_LE_QB
:
27228 case OPC_CMP_EQ_PH
:
27229 case OPC_CMP_LT_PH
:
27230 case OPC_CMP_LE_PH
:
27231 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27233 case OPC_CMPGU_EQ_QB
:
27234 case OPC_CMPGU_LT_QB
:
27235 case OPC_CMPGU_LE_QB
:
27236 case OPC_CMPGDU_EQ_QB
:
27237 case OPC_CMPGDU_LT_QB
:
27238 case OPC_CMPGDU_LE_QB
:
27241 case OPC_PACKRL_PH
:
27242 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27244 default: /* Invalid */
27245 MIPS_INVAL("MASK CMPU.EQ.QB");
27246 generate_exception_end(ctx
, EXCP_RI
);
27250 case OPC_SHLL_QB_DSP
:
27251 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27253 case OPC_DPA_W_PH_DSP
:
27254 op2
= MASK_DPA_W_PH(ctx
->opcode
);
27256 case OPC_DPAU_H_QBL
:
27257 case OPC_DPAU_H_QBR
:
27258 case OPC_DPSU_H_QBL
:
27259 case OPC_DPSU_H_QBR
:
27261 case OPC_DPAX_W_PH
:
27262 case OPC_DPAQ_S_W_PH
:
27263 case OPC_DPAQX_S_W_PH
:
27264 case OPC_DPAQX_SA_W_PH
:
27266 case OPC_DPSX_W_PH
:
27267 case OPC_DPSQ_S_W_PH
:
27268 case OPC_DPSQX_S_W_PH
:
27269 case OPC_DPSQX_SA_W_PH
:
27270 case OPC_MULSAQ_S_W_PH
:
27271 case OPC_DPAQ_SA_L_W
:
27272 case OPC_DPSQ_SA_L_W
:
27273 case OPC_MAQ_S_W_PHL
:
27274 case OPC_MAQ_S_W_PHR
:
27275 case OPC_MAQ_SA_W_PHL
:
27276 case OPC_MAQ_SA_W_PHR
:
27277 case OPC_MULSA_W_PH
:
27278 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27280 default: /* Invalid */
27281 MIPS_INVAL("MASK DPAW.PH");
27282 generate_exception_end(ctx
, EXCP_RI
);
27287 op2
= MASK_INSV(ctx
->opcode
);
27298 t0
= tcg_temp_new();
27299 t1
= tcg_temp_new();
27301 gen_load_gpr(t0
, rt
);
27302 gen_load_gpr(t1
, rs
);
27304 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27310 default: /* Invalid */
27311 MIPS_INVAL("MASK INSV");
27312 generate_exception_end(ctx
, EXCP_RI
);
27316 case OPC_APPEND_DSP
:
27317 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27319 case OPC_EXTR_W_DSP
:
27320 op2
= MASK_EXTR_W(ctx
->opcode
);
27324 case OPC_EXTR_RS_W
:
27326 case OPC_EXTRV_S_H
:
27328 case OPC_EXTRV_R_W
:
27329 case OPC_EXTRV_RS_W
:
27334 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27337 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27343 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27345 default: /* Invalid */
27346 MIPS_INVAL("MASK EXTR.W");
27347 generate_exception_end(ctx
, EXCP_RI
);
27351 #if defined(TARGET_MIPS64)
27352 case OPC_DDIV_G_2E
:
27353 case OPC_DDIVU_G_2E
:
27354 case OPC_DMULT_G_2E
:
27355 case OPC_DMULTU_G_2E
:
27356 case OPC_DMOD_G_2E
:
27357 case OPC_DMODU_G_2E
:
27358 check_insn(ctx
, INSN_LOONGSON2E
);
27359 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
27361 case OPC_ABSQ_S_QH_DSP
:
27362 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
27364 case OPC_PRECEQ_L_PWL
:
27365 case OPC_PRECEQ_L_PWR
:
27366 case OPC_PRECEQ_PW_QHL
:
27367 case OPC_PRECEQ_PW_QHR
:
27368 case OPC_PRECEQ_PW_QHLA
:
27369 case OPC_PRECEQ_PW_QHRA
:
27370 case OPC_PRECEQU_QH_OBL
:
27371 case OPC_PRECEQU_QH_OBR
:
27372 case OPC_PRECEQU_QH_OBLA
:
27373 case OPC_PRECEQU_QH_OBRA
:
27374 case OPC_PRECEU_QH_OBL
:
27375 case OPC_PRECEU_QH_OBR
:
27376 case OPC_PRECEU_QH_OBLA
:
27377 case OPC_PRECEU_QH_OBRA
:
27378 case OPC_ABSQ_S_OB
:
27379 case OPC_ABSQ_S_PW
:
27380 case OPC_ABSQ_S_QH
:
27381 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27389 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
27391 default: /* Invalid */
27392 MIPS_INVAL("MASK ABSQ_S.QH");
27393 generate_exception_end(ctx
, EXCP_RI
);
27397 case OPC_ADDU_OB_DSP
:
27398 op2
= MASK_ADDU_OB(ctx
->opcode
);
27400 case OPC_RADDU_L_OB
:
27402 case OPC_SUBQ_S_PW
:
27404 case OPC_SUBQ_S_QH
:
27406 case OPC_SUBU_S_OB
:
27408 case OPC_SUBU_S_QH
:
27410 case OPC_SUBUH_R_OB
:
27412 case OPC_ADDQ_S_PW
:
27414 case OPC_ADDQ_S_QH
:
27416 case OPC_ADDU_S_OB
:
27418 case OPC_ADDU_S_QH
:
27420 case OPC_ADDUH_R_OB
:
27421 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27423 case OPC_MULEQ_S_PW_QHL
:
27424 case OPC_MULEQ_S_PW_QHR
:
27425 case OPC_MULEU_S_QH_OBL
:
27426 case OPC_MULEU_S_QH_OBR
:
27427 case OPC_MULQ_RS_QH
:
27428 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27430 default: /* Invalid */
27431 MIPS_INVAL("MASK ADDU.OB");
27432 generate_exception_end(ctx
, EXCP_RI
);
27436 case OPC_CMPU_EQ_OB_DSP
:
27437 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
27439 case OPC_PRECR_SRA_QH_PW
:
27440 case OPC_PRECR_SRA_R_QH_PW
:
27441 /* Return value is rt. */
27442 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
27444 case OPC_PRECR_OB_QH
:
27445 case OPC_PRECRQ_OB_QH
:
27446 case OPC_PRECRQ_PW_L
:
27447 case OPC_PRECRQ_QH_PW
:
27448 case OPC_PRECRQ_RS_QH_PW
:
27449 case OPC_PRECRQU_S_OB_QH
:
27450 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
27452 case OPC_CMPU_EQ_OB
:
27453 case OPC_CMPU_LT_OB
:
27454 case OPC_CMPU_LE_OB
:
27455 case OPC_CMP_EQ_QH
:
27456 case OPC_CMP_LT_QH
:
27457 case OPC_CMP_LE_QH
:
27458 case OPC_CMP_EQ_PW
:
27459 case OPC_CMP_LT_PW
:
27460 case OPC_CMP_LE_PW
:
27461 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27463 case OPC_CMPGDU_EQ_OB
:
27464 case OPC_CMPGDU_LT_OB
:
27465 case OPC_CMPGDU_LE_OB
:
27466 case OPC_CMPGU_EQ_OB
:
27467 case OPC_CMPGU_LT_OB
:
27468 case OPC_CMPGU_LE_OB
:
27469 case OPC_PACKRL_PW
:
27473 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
27475 default: /* Invalid */
27476 MIPS_INVAL("MASK CMPU_EQ.OB");
27477 generate_exception_end(ctx
, EXCP_RI
);
27481 case OPC_DAPPEND_DSP
:
27482 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
27484 case OPC_DEXTR_W_DSP
:
27485 op2
= MASK_DEXTR_W(ctx
->opcode
);
27492 case OPC_DEXTR_R_L
:
27493 case OPC_DEXTR_RS_L
:
27495 case OPC_DEXTR_R_W
:
27496 case OPC_DEXTR_RS_W
:
27497 case OPC_DEXTR_S_H
:
27499 case OPC_DEXTRV_R_L
:
27500 case OPC_DEXTRV_RS_L
:
27501 case OPC_DEXTRV_S_H
:
27503 case OPC_DEXTRV_R_W
:
27504 case OPC_DEXTRV_RS_W
:
27505 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
27510 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27512 default: /* Invalid */
27513 MIPS_INVAL("MASK EXTR.W");
27514 generate_exception_end(ctx
, EXCP_RI
);
27518 case OPC_DPAQ_W_QH_DSP
:
27519 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
27521 case OPC_DPAU_H_OBL
:
27522 case OPC_DPAU_H_OBR
:
27523 case OPC_DPSU_H_OBL
:
27524 case OPC_DPSU_H_OBR
:
27526 case OPC_DPAQ_S_W_QH
:
27528 case OPC_DPSQ_S_W_QH
:
27529 case OPC_MULSAQ_S_W_QH
:
27530 case OPC_DPAQ_SA_L_PW
:
27531 case OPC_DPSQ_SA_L_PW
:
27532 case OPC_MULSAQ_S_L_PW
:
27533 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27535 case OPC_MAQ_S_W_QHLL
:
27536 case OPC_MAQ_S_W_QHLR
:
27537 case OPC_MAQ_S_W_QHRL
:
27538 case OPC_MAQ_S_W_QHRR
:
27539 case OPC_MAQ_SA_W_QHLL
:
27540 case OPC_MAQ_SA_W_QHLR
:
27541 case OPC_MAQ_SA_W_QHRL
:
27542 case OPC_MAQ_SA_W_QHRR
:
27543 case OPC_MAQ_S_L_PWL
:
27544 case OPC_MAQ_S_L_PWR
:
27549 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
27551 default: /* Invalid */
27552 MIPS_INVAL("MASK DPAQ.W.QH");
27553 generate_exception_end(ctx
, EXCP_RI
);
27557 case OPC_DINSV_DSP
:
27558 op2
= MASK_INSV(ctx
->opcode
);
27569 t0
= tcg_temp_new();
27570 t1
= tcg_temp_new();
27572 gen_load_gpr(t0
, rt
);
27573 gen_load_gpr(t1
, rs
);
27575 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
27581 default: /* Invalid */
27582 MIPS_INVAL("MASK DINSV");
27583 generate_exception_end(ctx
, EXCP_RI
);
27587 case OPC_SHLL_OB_DSP
:
27588 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
27591 default: /* Invalid */
27592 MIPS_INVAL("special3_legacy");
27593 generate_exception_end(ctx
, EXCP_RI
);
27599 #if defined(TARGET_MIPS64)
27601 static void decode_mmi0(CPUMIPSState
*env
, DisasContext
*ctx
)
27603 uint32_t opc
= MASK_MMI0(ctx
->opcode
);
27606 case MMI_OPC_0_PADDW
: /* TODO: MMI_OPC_0_PADDW */
27607 case MMI_OPC_0_PSUBW
: /* TODO: MMI_OPC_0_PSUBW */
27608 case MMI_OPC_0_PCGTW
: /* TODO: MMI_OPC_0_PCGTW */
27609 case MMI_OPC_0_PMAXW
: /* TODO: MMI_OPC_0_PMAXW */
27610 case MMI_OPC_0_PADDH
: /* TODO: MMI_OPC_0_PADDH */
27611 case MMI_OPC_0_PSUBH
: /* TODO: MMI_OPC_0_PSUBH */
27612 case MMI_OPC_0_PCGTH
: /* TODO: MMI_OPC_0_PCGTH */
27613 case MMI_OPC_0_PMAXH
: /* TODO: MMI_OPC_0_PMAXH */
27614 case MMI_OPC_0_PADDB
: /* TODO: MMI_OPC_0_PADDB */
27615 case MMI_OPC_0_PSUBB
: /* TODO: MMI_OPC_0_PSUBB */
27616 case MMI_OPC_0_PCGTB
: /* TODO: MMI_OPC_0_PCGTB */
27617 case MMI_OPC_0_PADDSW
: /* TODO: MMI_OPC_0_PADDSW */
27618 case MMI_OPC_0_PSUBSW
: /* TODO: MMI_OPC_0_PSUBSW */
27619 case MMI_OPC_0_PEXTLW
: /* TODO: MMI_OPC_0_PEXTLW */
27620 case MMI_OPC_0_PPACW
: /* TODO: MMI_OPC_0_PPACW */
27621 case MMI_OPC_0_PADDSH
: /* TODO: MMI_OPC_0_PADDSH */
27622 case MMI_OPC_0_PSUBSH
: /* TODO: MMI_OPC_0_PSUBSH */
27623 case MMI_OPC_0_PEXTLH
: /* TODO: MMI_OPC_0_PEXTLH */
27624 case MMI_OPC_0_PPACH
: /* TODO: MMI_OPC_0_PPACH */
27625 case MMI_OPC_0_PADDSB
: /* TODO: MMI_OPC_0_PADDSB */
27626 case MMI_OPC_0_PSUBSB
: /* TODO: MMI_OPC_0_PSUBSB */
27627 case MMI_OPC_0_PEXTLB
: /* TODO: MMI_OPC_0_PEXTLB */
27628 case MMI_OPC_0_PPACB
: /* TODO: MMI_OPC_0_PPACB */
27629 case MMI_OPC_0_PEXT5
: /* TODO: MMI_OPC_0_PEXT5 */
27630 case MMI_OPC_0_PPAC5
: /* TODO: MMI_OPC_0_PPAC5 */
27631 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI0 */
27634 MIPS_INVAL("TX79 MMI class MMI0");
27635 generate_exception_end(ctx
, EXCP_RI
);
27640 static void decode_mmi1(CPUMIPSState
*env
, DisasContext
*ctx
)
27642 uint32_t opc
= MASK_MMI1(ctx
->opcode
);
27645 case MMI_OPC_1_PABSW
: /* TODO: MMI_OPC_1_PABSW */
27646 case MMI_OPC_1_PCEQW
: /* TODO: MMI_OPC_1_PCEQW */
27647 case MMI_OPC_1_PMINW
: /* TODO: MMI_OPC_1_PMINW */
27648 case MMI_OPC_1_PADSBH
: /* TODO: MMI_OPC_1_PADSBH */
27649 case MMI_OPC_1_PABSH
: /* TODO: MMI_OPC_1_PABSH */
27650 case MMI_OPC_1_PCEQH
: /* TODO: MMI_OPC_1_PCEQH */
27651 case MMI_OPC_1_PMINH
: /* TODO: MMI_OPC_1_PMINH */
27652 case MMI_OPC_1_PCEQB
: /* TODO: MMI_OPC_1_PCEQB */
27653 case MMI_OPC_1_PADDUW
: /* TODO: MMI_OPC_1_PADDUW */
27654 case MMI_OPC_1_PSUBUW
: /* TODO: MMI_OPC_1_PSUBUW */
27655 case MMI_OPC_1_PEXTUW
: /* TODO: MMI_OPC_1_PEXTUW */
27656 case MMI_OPC_1_PADDUH
: /* TODO: MMI_OPC_1_PADDUH */
27657 case MMI_OPC_1_PSUBUH
: /* TODO: MMI_OPC_1_PSUBUH */
27658 case MMI_OPC_1_PEXTUH
: /* TODO: MMI_OPC_1_PEXTUH */
27659 case MMI_OPC_1_PADDUB
: /* TODO: MMI_OPC_1_PADDUB */
27660 case MMI_OPC_1_PSUBUB
: /* TODO: MMI_OPC_1_PSUBUB */
27661 case MMI_OPC_1_PEXTUB
: /* TODO: MMI_OPC_1_PEXTUB */
27662 case MMI_OPC_1_QFSRV
: /* TODO: MMI_OPC_1_QFSRV */
27663 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI1 */
27666 MIPS_INVAL("TX79 MMI class MMI1");
27667 generate_exception_end(ctx
, EXCP_RI
);
27672 static void decode_mmi2(CPUMIPSState
*env
, DisasContext
*ctx
)
27674 uint32_t opc
= MASK_MMI2(ctx
->opcode
);
27677 case MMI_OPC_2_PMADDW
: /* TODO: MMI_OPC_2_PMADDW */
27678 case MMI_OPC_2_PSLLVW
: /* TODO: MMI_OPC_2_PSLLVW */
27679 case MMI_OPC_2_PSRLVW
: /* TODO: MMI_OPC_2_PSRLVW */
27680 case MMI_OPC_2_PMSUBW
: /* TODO: MMI_OPC_2_PMSUBW */
27681 case MMI_OPC_2_PMFHI
: /* TODO: MMI_OPC_2_PMFHI */
27682 case MMI_OPC_2_PMFLO
: /* TODO: MMI_OPC_2_PMFLO */
27683 case MMI_OPC_2_PINTH
: /* TODO: MMI_OPC_2_PINTH */
27684 case MMI_OPC_2_PMULTW
: /* TODO: MMI_OPC_2_PMULTW */
27685 case MMI_OPC_2_PDIVW
: /* TODO: MMI_OPC_2_PDIVW */
27686 case MMI_OPC_2_PMADDH
: /* TODO: MMI_OPC_2_PMADDH */
27687 case MMI_OPC_2_PHMADH
: /* TODO: MMI_OPC_2_PHMADH */
27688 case MMI_OPC_2_PAND
: /* TODO: MMI_OPC_2_PAND */
27689 case MMI_OPC_2_PXOR
: /* TODO: MMI_OPC_2_PXOR */
27690 case MMI_OPC_2_PMSUBH
: /* TODO: MMI_OPC_2_PMSUBH */
27691 case MMI_OPC_2_PHMSBH
: /* TODO: MMI_OPC_2_PHMSBH */
27692 case MMI_OPC_2_PEXEH
: /* TODO: MMI_OPC_2_PEXEH */
27693 case MMI_OPC_2_PREVH
: /* TODO: MMI_OPC_2_PREVH */
27694 case MMI_OPC_2_PMULTH
: /* TODO: MMI_OPC_2_PMULTH */
27695 case MMI_OPC_2_PDIVBW
: /* TODO: MMI_OPC_2_PDIVBW */
27696 case MMI_OPC_2_PEXEW
: /* TODO: MMI_OPC_2_PEXEW */
27697 case MMI_OPC_2_PROT3W
: /* TODO: MMI_OPC_2_PROT3W */
27698 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI2 */
27700 case MMI_OPC_2_PCPYLD
:
27701 gen_mmi_pcpyld(ctx
);
27704 MIPS_INVAL("TX79 MMI class MMI2");
27705 generate_exception_end(ctx
, EXCP_RI
);
27710 static void decode_mmi3(CPUMIPSState
*env
, DisasContext
*ctx
)
27712 uint32_t opc
= MASK_MMI3(ctx
->opcode
);
27715 case MMI_OPC_3_PMADDUW
: /* TODO: MMI_OPC_3_PMADDUW */
27716 case MMI_OPC_3_PSRAVW
: /* TODO: MMI_OPC_3_PSRAVW */
27717 case MMI_OPC_3_PMTHI
: /* TODO: MMI_OPC_3_PMTHI */
27718 case MMI_OPC_3_PMTLO
: /* TODO: MMI_OPC_3_PMTLO */
27719 case MMI_OPC_3_PINTEH
: /* TODO: MMI_OPC_3_PINTEH */
27720 case MMI_OPC_3_PMULTUW
: /* TODO: MMI_OPC_3_PMULTUW */
27721 case MMI_OPC_3_PDIVUW
: /* TODO: MMI_OPC_3_PDIVUW */
27722 case MMI_OPC_3_POR
: /* TODO: MMI_OPC_3_POR */
27723 case MMI_OPC_3_PNOR
: /* TODO: MMI_OPC_3_PNOR */
27724 case MMI_OPC_3_PEXCH
: /* TODO: MMI_OPC_3_PEXCH */
27725 case MMI_OPC_3_PEXCW
: /* TODO: MMI_OPC_3_PEXCW */
27726 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI3 */
27728 case MMI_OPC_3_PCPYH
:
27729 gen_mmi_pcpyh(ctx
);
27731 case MMI_OPC_3_PCPYUD
:
27732 gen_mmi_pcpyud(ctx
);
27735 MIPS_INVAL("TX79 MMI class MMI3");
27736 generate_exception_end(ctx
, EXCP_RI
);
27741 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
27743 uint32_t opc
= MASK_MMI(ctx
->opcode
);
27744 int rs
= extract32(ctx
->opcode
, 21, 5);
27745 int rt
= extract32(ctx
->opcode
, 16, 5);
27746 int rd
= extract32(ctx
->opcode
, 11, 5);
27749 case MMI_OPC_CLASS_MMI0
:
27750 decode_mmi0(env
, ctx
);
27752 case MMI_OPC_CLASS_MMI1
:
27753 decode_mmi1(env
, ctx
);
27755 case MMI_OPC_CLASS_MMI2
:
27756 decode_mmi2(env
, ctx
);
27758 case MMI_OPC_CLASS_MMI3
:
27759 decode_mmi3(env
, ctx
);
27761 case MMI_OPC_MULT1
:
27762 case MMI_OPC_MULTU1
:
27764 case MMI_OPC_MADDU
:
27765 case MMI_OPC_MADD1
:
27766 case MMI_OPC_MADDU1
:
27767 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
27770 case MMI_OPC_DIVU1
:
27771 gen_div1_tx79(ctx
, opc
, rs
, rt
);
27773 case MMI_OPC_MTLO1
:
27774 case MMI_OPC_MTHI1
:
27775 gen_HILO1_tx79(ctx
, opc
, rs
);
27777 case MMI_OPC_MFLO1
:
27778 case MMI_OPC_MFHI1
:
27779 gen_HILO1_tx79(ctx
, opc
, rd
);
27781 case MMI_OPC_PLZCW
: /* TODO: MMI_OPC_PLZCW */
27782 case MMI_OPC_PMFHL
: /* TODO: MMI_OPC_PMFHL */
27783 case MMI_OPC_PMTHL
: /* TODO: MMI_OPC_PMTHL */
27784 case MMI_OPC_PSLLH
: /* TODO: MMI_OPC_PSLLH */
27785 case MMI_OPC_PSRLH
: /* TODO: MMI_OPC_PSRLH */
27786 case MMI_OPC_PSRAH
: /* TODO: MMI_OPC_PSRAH */
27787 case MMI_OPC_PSLLW
: /* TODO: MMI_OPC_PSLLW */
27788 case MMI_OPC_PSRLW
: /* TODO: MMI_OPC_PSRLW */
27789 case MMI_OPC_PSRAW
: /* TODO: MMI_OPC_PSRAW */
27790 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_CLASS_MMI */
27793 MIPS_INVAL("TX79 MMI class");
27794 generate_exception_end(ctx
, EXCP_RI
);
27799 static void gen_mmi_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
27801 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_LQ */
27804 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
27806 generate_exception_end(ctx
, EXCP_RI
); /* TODO: MMI_OPC_SQ */
27810 * The TX79-specific instruction Store Quadword
27812 * +--------+-------+-------+------------------------+
27813 * | 011111 | base | rt | offset | SQ
27814 * +--------+-------+-------+------------------------+
27817 * has the same opcode as the Read Hardware Register instruction
27819 * +--------+-------+-------+-------+-------+--------+
27820 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
27821 * +--------+-------+-------+-------+-------+--------+
27824 * that is required, trapped and emulated by the Linux kernel. However, all
27825 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
27826 * offset is odd. Therefore all valid SQ instructions can execute normally.
27827 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
27828 * between SQ and RDHWR, as the Linux kernel does.
27830 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
27832 int base
= extract32(ctx
->opcode
, 21, 5);
27833 int rt
= extract32(ctx
->opcode
, 16, 5);
27834 int offset
= extract32(ctx
->opcode
, 0, 16);
27836 #ifdef CONFIG_USER_ONLY
27837 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
27838 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
27840 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
27841 int rd
= extract32(ctx
->opcode
, 11, 5);
27843 gen_rdhwr(ctx
, rt
, rd
, 0);
27848 gen_mmi_sq(ctx
, base
, rt
, offset
);
27853 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
27855 int rs
, rt
, rd
, sa
;
27859 rs
= (ctx
->opcode
>> 21) & 0x1f;
27860 rt
= (ctx
->opcode
>> 16) & 0x1f;
27861 rd
= (ctx
->opcode
>> 11) & 0x1f;
27862 sa
= (ctx
->opcode
>> 6) & 0x1f;
27863 imm
= sextract32(ctx
->opcode
, 7, 9);
27865 op1
= MASK_SPECIAL3(ctx
->opcode
);
27868 * EVA loads and stores overlap Loongson 2E instructions decoded by
27869 * decode_opc_special3_legacy(), so be careful to allow their decoding when
27876 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27884 check_cp0_enabled(ctx
);
27885 gen_ld(ctx
, op1
, rt
, rs
, imm
);
27889 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
27894 check_cp0_enabled(ctx
);
27895 gen_st(ctx
, op1
, rt
, rs
, imm
);
27898 check_cp0_enabled(ctx
);
27899 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, true);
27902 check_cp0_enabled(ctx
);
27903 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27904 gen_cache_operation(ctx
, rt
, rs
, imm
);
27906 /* Treat as NOP. */
27909 check_cp0_enabled(ctx
);
27910 /* Treat as NOP. */
27918 check_insn(ctx
, ISA_MIPS32R2
);
27919 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27922 op2
= MASK_BSHFL(ctx
->opcode
);
27929 check_insn(ctx
, ISA_MIPS32R6
);
27930 decode_opc_special3_r6(env
, ctx
);
27933 check_insn(ctx
, ISA_MIPS32R2
);
27934 gen_bshfl(ctx
, op2
, rt
, rd
);
27938 #if defined(TARGET_MIPS64)
27945 check_insn(ctx
, ISA_MIPS64R2
);
27946 check_mips_64(ctx
);
27947 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
27950 op2
= MASK_DBSHFL(ctx
->opcode
);
27961 check_insn(ctx
, ISA_MIPS32R6
);
27962 decode_opc_special3_r6(env
, ctx
);
27965 check_insn(ctx
, ISA_MIPS64R2
);
27966 check_mips_64(ctx
);
27967 op2
= MASK_DBSHFL(ctx
->opcode
);
27968 gen_bshfl(ctx
, op2
, rt
, rd
);
27974 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
27979 TCGv t0
= tcg_temp_new();
27980 TCGv t1
= tcg_temp_new();
27982 gen_load_gpr(t0
, rt
);
27983 gen_load_gpr(t1
, rs
);
27984 gen_helper_fork(t0
, t1
);
27992 TCGv t0
= tcg_temp_new();
27994 gen_load_gpr(t0
, rs
);
27995 gen_helper_yield(t0
, cpu_env
, t0
);
27996 gen_store_gpr(t0
, rd
);
28001 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
28002 decode_opc_special3_r6(env
, ctx
);
28004 decode_opc_special3_legacy(env
, ctx
);
28009 /* MIPS SIMD Architecture (MSA) */
28010 static inline int check_msa_access(DisasContext
*ctx
)
28012 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
28013 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
28014 generate_exception_end(ctx
, EXCP_RI
);
28018 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
28019 if (ctx
->insn_flags
& ASE_MSA
) {
28020 generate_exception_end(ctx
, EXCP_MSADIS
);
28023 generate_exception_end(ctx
, EXCP_RI
);
28030 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
28032 /* generates tcg ops to check if any element is 0 */
28033 /* Note this function only works with MSA_WRLEN = 128 */
28034 uint64_t eval_zero_or_big
= 0;
28035 uint64_t eval_big
= 0;
28036 TCGv_i64 t0
= tcg_temp_new_i64();
28037 TCGv_i64 t1
= tcg_temp_new_i64();
28040 eval_zero_or_big
= 0x0101010101010101ULL
;
28041 eval_big
= 0x8080808080808080ULL
;
28044 eval_zero_or_big
= 0x0001000100010001ULL
;
28045 eval_big
= 0x8000800080008000ULL
;
28048 eval_zero_or_big
= 0x0000000100000001ULL
;
28049 eval_big
= 0x8000000080000000ULL
;
28052 eval_zero_or_big
= 0x0000000000000001ULL
;
28053 eval_big
= 0x8000000000000000ULL
;
28056 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<< 1], eval_zero_or_big
);
28057 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<< 1]);
28058 tcg_gen_andi_i64(t0
, t0
, eval_big
);
28059 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<< 1) + 1], eval_zero_or_big
);
28060 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<< 1) + 1]);
28061 tcg_gen_andi_i64(t1
, t1
, eval_big
);
28062 tcg_gen_or_i64(t0
, t0
, t1
);
28063 /* if all bits are zero then all elements are not zero */
28064 /* if some bit is non-zero then some element is zero */
28065 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
28066 tcg_gen_trunc_i64_tl(tresult
, t0
);
28067 tcg_temp_free_i64(t0
);
28068 tcg_temp_free_i64(t1
);
28071 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
28073 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28074 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28075 int64_t s16
= (int16_t)ctx
->opcode
;
28077 check_msa_access(ctx
);
28079 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
28080 generate_exception_end(ctx
, EXCP_RI
);
28087 TCGv_i64 t0
= tcg_temp_new_i64();
28088 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<< 1], msa_wr_d
[(wt
<< 1) + 1]);
28089 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
28090 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
28091 tcg_gen_trunc_i64_tl(bcond
, t0
);
28092 tcg_temp_free_i64(t0
);
28099 gen_check_zero_element(bcond
, df
, wt
);
28105 gen_check_zero_element(bcond
, df
, wt
);
28106 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
28110 ctx
->btarget
= ctx
->base
.pc_next
+ (s16
<< 2) + 4;
28112 ctx
->hflags
|= MIPS_HFLAG_BC
;
28113 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
28116 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
28118 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
28119 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
28120 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28121 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28123 TCGv_i32 twd
= tcg_const_i32(wd
);
28124 TCGv_i32 tws
= tcg_const_i32(ws
);
28125 TCGv_i32 ti8
= tcg_const_i32(i8
);
28127 switch (MASK_MSA_I8(ctx
->opcode
)) {
28129 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
28132 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
28135 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
28138 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
28141 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
28144 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
28147 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
28153 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
28154 if (df
== DF_DOUBLE
) {
28155 generate_exception_end(ctx
, EXCP_RI
);
28157 TCGv_i32 tdf
= tcg_const_i32(df
);
28158 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
28159 tcg_temp_free_i32(tdf
);
28164 MIPS_INVAL("MSA instruction");
28165 generate_exception_end(ctx
, EXCP_RI
);
28169 tcg_temp_free_i32(twd
);
28170 tcg_temp_free_i32(tws
);
28171 tcg_temp_free_i32(ti8
);
28174 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
28176 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28177 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28178 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
28179 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
28180 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28181 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28183 TCGv_i32 tdf
= tcg_const_i32(df
);
28184 TCGv_i32 twd
= tcg_const_i32(wd
);
28185 TCGv_i32 tws
= tcg_const_i32(ws
);
28186 TCGv_i32 timm
= tcg_temp_new_i32();
28187 tcg_gen_movi_i32(timm
, u5
);
28189 switch (MASK_MSA_I5(ctx
->opcode
)) {
28191 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28194 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28196 case OPC_MAXI_S_df
:
28197 tcg_gen_movi_i32(timm
, s5
);
28198 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28200 case OPC_MAXI_U_df
:
28201 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28203 case OPC_MINI_S_df
:
28204 tcg_gen_movi_i32(timm
, s5
);
28205 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28207 case OPC_MINI_U_df
:
28208 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28211 tcg_gen_movi_i32(timm
, s5
);
28212 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
28214 case OPC_CLTI_S_df
:
28215 tcg_gen_movi_i32(timm
, s5
);
28216 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28218 case OPC_CLTI_U_df
:
28219 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28221 case OPC_CLEI_S_df
:
28222 tcg_gen_movi_i32(timm
, s5
);
28223 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
28225 case OPC_CLEI_U_df
:
28226 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
28230 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
28231 tcg_gen_movi_i32(timm
, s10
);
28232 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
28236 MIPS_INVAL("MSA instruction");
28237 generate_exception_end(ctx
, EXCP_RI
);
28241 tcg_temp_free_i32(tdf
);
28242 tcg_temp_free_i32(twd
);
28243 tcg_temp_free_i32(tws
);
28244 tcg_temp_free_i32(timm
);
28247 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
28249 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28250 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
28251 uint32_t df
= 0, m
= 0;
28252 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28253 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28260 if ((dfm
& 0x40) == 0x00) {
28263 } else if ((dfm
& 0x60) == 0x40) {
28266 } else if ((dfm
& 0x70) == 0x60) {
28269 } else if ((dfm
& 0x78) == 0x70) {
28273 generate_exception_end(ctx
, EXCP_RI
);
28277 tdf
= tcg_const_i32(df
);
28278 tm
= tcg_const_i32(m
);
28279 twd
= tcg_const_i32(wd
);
28280 tws
= tcg_const_i32(ws
);
28282 switch (MASK_MSA_BIT(ctx
->opcode
)) {
28284 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28287 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
28290 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28293 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28296 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
28299 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
28301 case OPC_BINSLI_df
:
28302 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
28304 case OPC_BINSRI_df
:
28305 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28308 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
28311 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
28314 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
28317 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
28320 MIPS_INVAL("MSA instruction");
28321 generate_exception_end(ctx
, EXCP_RI
);
28325 tcg_temp_free_i32(tdf
);
28326 tcg_temp_free_i32(tm
);
28327 tcg_temp_free_i32(twd
);
28328 tcg_temp_free_i32(tws
);
28331 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
28333 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
28334 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
28335 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28336 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28337 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28339 TCGv_i32 tdf
= tcg_const_i32(df
);
28340 TCGv_i32 twd
= tcg_const_i32(wd
);
28341 TCGv_i32 tws
= tcg_const_i32(ws
);
28342 TCGv_i32 twt
= tcg_const_i32(wt
);
28344 switch (MASK_MSA_3R(ctx
->opcode
)) {
28346 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
28349 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28352 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28355 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28357 case OPC_SUBS_S_df
:
28358 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28361 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28364 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
28367 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28370 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
28373 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28375 case OPC_ADDS_A_df
:
28376 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28378 case OPC_SUBS_U_df
:
28379 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28382 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28385 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
28388 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
28391 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28394 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28397 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28399 case OPC_ADDS_S_df
:
28400 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28402 case OPC_SUBSUS_U_df
:
28403 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28406 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28409 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28412 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28415 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28418 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28421 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28423 case OPC_ADDS_U_df
:
28424 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28426 case OPC_SUBSUU_S_df
:
28427 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28430 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28433 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
28436 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28439 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28442 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28444 case OPC_ASUB_S_df
:
28445 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28448 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28451 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28454 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
28457 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28460 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28463 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28465 case OPC_ASUB_U_df
:
28466 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28469 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28472 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28475 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
28478 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28480 case OPC_AVER_S_df
:
28481 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28484 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28487 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
28490 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
28493 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28495 case OPC_AVER_U_df
:
28496 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28499 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28502 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
28505 case OPC_DOTP_S_df
:
28506 case OPC_DOTP_U_df
:
28507 case OPC_DPADD_S_df
:
28508 case OPC_DPADD_U_df
:
28509 case OPC_DPSUB_S_df
:
28510 case OPC_HADD_S_df
:
28511 case OPC_DPSUB_U_df
:
28512 case OPC_HADD_U_df
:
28513 case OPC_HSUB_S_df
:
28514 case OPC_HSUB_U_df
:
28515 if (df
== DF_BYTE
) {
28516 generate_exception_end(ctx
, EXCP_RI
);
28519 switch (MASK_MSA_3R(ctx
->opcode
)) {
28520 case OPC_DOTP_S_df
:
28521 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28523 case OPC_DOTP_U_df
:
28524 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28526 case OPC_DPADD_S_df
:
28527 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28529 case OPC_DPADD_U_df
:
28530 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28532 case OPC_DPSUB_S_df
:
28533 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28535 case OPC_HADD_S_df
:
28536 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28538 case OPC_DPSUB_U_df
:
28539 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28541 case OPC_HADD_U_df
:
28542 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28544 case OPC_HSUB_S_df
:
28545 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
28547 case OPC_HSUB_U_df
:
28548 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
28553 MIPS_INVAL("MSA instruction");
28554 generate_exception_end(ctx
, EXCP_RI
);
28557 tcg_temp_free_i32(twd
);
28558 tcg_temp_free_i32(tws
);
28559 tcg_temp_free_i32(twt
);
28560 tcg_temp_free_i32(tdf
);
28563 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
28565 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
28566 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
28567 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
28568 TCGv telm
= tcg_temp_new();
28569 TCGv_i32 tsr
= tcg_const_i32(source
);
28570 TCGv_i32 tdt
= tcg_const_i32(dest
);
28572 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
28574 gen_load_gpr(telm
, source
);
28575 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
28578 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
28579 gen_store_gpr(telm
, dest
);
28582 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
28585 MIPS_INVAL("MSA instruction");
28586 generate_exception_end(ctx
, EXCP_RI
);
28590 tcg_temp_free(telm
);
28591 tcg_temp_free_i32(tdt
);
28592 tcg_temp_free_i32(tsr
);
28595 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
28598 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28599 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28600 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28602 TCGv_i32 tws
= tcg_const_i32(ws
);
28603 TCGv_i32 twd
= tcg_const_i32(wd
);
28604 TCGv_i32 tn
= tcg_const_i32(n
);
28605 TCGv_i32 tdf
= tcg_const_i32(df
);
28607 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28609 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
28611 case OPC_SPLATI_df
:
28612 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
28615 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
28617 case OPC_COPY_S_df
:
28618 case OPC_COPY_U_df
:
28619 case OPC_INSERT_df
:
28620 #if !defined(TARGET_MIPS64)
28621 /* Double format valid only for MIPS64 */
28622 if (df
== DF_DOUBLE
) {
28623 generate_exception_end(ctx
, EXCP_RI
);
28626 if ((MASK_MSA_ELM(ctx
->opcode
) == OPC_COPY_U_df
) &&
28628 generate_exception_end(ctx
, EXCP_RI
);
28632 switch (MASK_MSA_ELM(ctx
->opcode
)) {
28633 case OPC_COPY_S_df
:
28634 if (likely(wd
!= 0)) {
28637 gen_helper_msa_copy_s_b(cpu_env
, twd
, tws
, tn
);
28640 gen_helper_msa_copy_s_h(cpu_env
, twd
, tws
, tn
);
28643 gen_helper_msa_copy_s_w(cpu_env
, twd
, tws
, tn
);
28645 #if defined(TARGET_MIPS64)
28647 gen_helper_msa_copy_s_d(cpu_env
, twd
, tws
, tn
);
28655 case OPC_COPY_U_df
:
28656 if (likely(wd
!= 0)) {
28659 gen_helper_msa_copy_u_b(cpu_env
, twd
, tws
, tn
);
28662 gen_helper_msa_copy_u_h(cpu_env
, twd
, tws
, tn
);
28664 #if defined(TARGET_MIPS64)
28666 gen_helper_msa_copy_u_w(cpu_env
, twd
, tws
, tn
);
28674 case OPC_INSERT_df
:
28677 gen_helper_msa_insert_b(cpu_env
, twd
, tws
, tn
);
28680 gen_helper_msa_insert_h(cpu_env
, twd
, tws
, tn
);
28683 gen_helper_msa_insert_w(cpu_env
, twd
, tws
, tn
);
28685 #if defined(TARGET_MIPS64)
28687 gen_helper_msa_insert_d(cpu_env
, twd
, tws
, tn
);
28697 MIPS_INVAL("MSA instruction");
28698 generate_exception_end(ctx
, EXCP_RI
);
28700 tcg_temp_free_i32(twd
);
28701 tcg_temp_free_i32(tws
);
28702 tcg_temp_free_i32(tn
);
28703 tcg_temp_free_i32(tdf
);
28706 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
28708 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
28709 uint32_t df
= 0, n
= 0;
28711 if ((dfn
& 0x30) == 0x00) {
28714 } else if ((dfn
& 0x38) == 0x20) {
28717 } else if ((dfn
& 0x3c) == 0x30) {
28720 } else if ((dfn
& 0x3e) == 0x38) {
28723 } else if (dfn
== 0x3E) {
28724 /* CTCMSA, CFCMSA, MOVE.V */
28725 gen_msa_elm_3e(env
, ctx
);
28728 generate_exception_end(ctx
, EXCP_RI
);
28732 gen_msa_elm_df(env
, ctx
, df
, n
);
28735 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28737 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
28738 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
28739 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28740 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28741 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28743 TCGv_i32 twd
= tcg_const_i32(wd
);
28744 TCGv_i32 tws
= tcg_const_i32(ws
);
28745 TCGv_i32 twt
= tcg_const_i32(wt
);
28746 TCGv_i32 tdf
= tcg_temp_new_i32();
28748 /* adjust df value for floating-point instruction */
28749 tcg_gen_movi_i32(tdf
, df
+ 2);
28751 switch (MASK_MSA_3RF(ctx
->opcode
)) {
28753 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28756 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28759 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28762 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28765 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28768 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28771 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
28774 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28777 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28780 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
28783 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28786 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28789 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
28792 tcg_gen_movi_i32(tdf
, df
+ 1);
28793 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28796 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28799 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
28801 case OPC_MADD_Q_df
:
28802 tcg_gen_movi_i32(tdf
, df
+ 1);
28803 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28806 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28808 case OPC_MSUB_Q_df
:
28809 tcg_gen_movi_i32(tdf
, df
+ 1);
28810 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28813 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28816 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
28819 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
28822 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
28825 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
28828 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
28831 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28834 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28837 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
28840 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
28843 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
28846 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
28849 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
28851 case OPC_MULR_Q_df
:
28852 tcg_gen_movi_i32(tdf
, df
+ 1);
28853 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28856 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
28858 case OPC_FMIN_A_df
:
28859 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28861 case OPC_MADDR_Q_df
:
28862 tcg_gen_movi_i32(tdf
, df
+ 1);
28863 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28866 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
28869 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
28871 case OPC_MSUBR_Q_df
:
28872 tcg_gen_movi_i32(tdf
, df
+ 1);
28873 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
28876 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
28878 case OPC_FMAX_A_df
:
28879 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
28882 MIPS_INVAL("MSA instruction");
28883 generate_exception_end(ctx
, EXCP_RI
);
28887 tcg_temp_free_i32(twd
);
28888 tcg_temp_free_i32(tws
);
28889 tcg_temp_free_i32(twt
);
28890 tcg_temp_free_i32(tdf
);
28893 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
28895 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28896 (op & (0x7 << 18)))
28897 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28898 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28899 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28900 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
28901 TCGv_i32 twd
= tcg_const_i32(wd
);
28902 TCGv_i32 tws
= tcg_const_i32(ws
);
28903 TCGv_i32 twt
= tcg_const_i32(wt
);
28904 TCGv_i32 tdf
= tcg_const_i32(df
);
28906 switch (MASK_MSA_2R(ctx
->opcode
)) {
28908 #if !defined(TARGET_MIPS64)
28909 /* Double format valid only for MIPS64 */
28910 if (df
== DF_DOUBLE
) {
28911 generate_exception_end(ctx
, EXCP_RI
);
28915 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
28918 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
28921 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
28924 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
28927 MIPS_INVAL("MSA instruction");
28928 generate_exception_end(ctx
, EXCP_RI
);
28932 tcg_temp_free_i32(twd
);
28933 tcg_temp_free_i32(tws
);
28934 tcg_temp_free_i32(twt
);
28935 tcg_temp_free_i32(tdf
);
28938 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
28940 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
28941 (op & (0xf << 17)))
28942 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
28943 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
28944 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
28945 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
28946 TCGv_i32 twd
= tcg_const_i32(wd
);
28947 TCGv_i32 tws
= tcg_const_i32(ws
);
28948 TCGv_i32 twt
= tcg_const_i32(wt
);
28949 /* adjust df value for floating-point instruction */
28950 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
28952 switch (MASK_MSA_2RF(ctx
->opcode
)) {
28953 case OPC_FCLASS_df
:
28954 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
28956 case OPC_FTRUNC_S_df
:
28957 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
28959 case OPC_FTRUNC_U_df
:
28960 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
28963 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
28965 case OPC_FRSQRT_df
:
28966 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
28969 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
28972 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
28975 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
28977 case OPC_FEXUPL_df
:
28978 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
28980 case OPC_FEXUPR_df
:
28981 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
28984 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
28987 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
28989 case OPC_FTINT_S_df
:
28990 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
28992 case OPC_FTINT_U_df
:
28993 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
28995 case OPC_FFINT_S_df
:
28996 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
28998 case OPC_FFINT_U_df
:
28999 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
29003 tcg_temp_free_i32(twd
);
29004 tcg_temp_free_i32(tws
);
29005 tcg_temp_free_i32(twt
);
29006 tcg_temp_free_i32(tdf
);
29009 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
29011 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
29012 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
29013 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
29014 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
29015 TCGv_i32 twd
= tcg_const_i32(wd
);
29016 TCGv_i32 tws
= tcg_const_i32(ws
);
29017 TCGv_i32 twt
= tcg_const_i32(wt
);
29019 switch (MASK_MSA_VEC(ctx
->opcode
)) {
29021 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
29024 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
29027 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
29030 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
29033 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
29036 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
29039 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
29042 MIPS_INVAL("MSA instruction");
29043 generate_exception_end(ctx
, EXCP_RI
);
29047 tcg_temp_free_i32(twd
);
29048 tcg_temp_free_i32(tws
);
29049 tcg_temp_free_i32(twt
);
29052 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
29054 switch (MASK_MSA_VEC(ctx
->opcode
)) {
29062 gen_msa_vec_v(env
, ctx
);
29065 gen_msa_2r(env
, ctx
);
29068 gen_msa_2rf(env
, ctx
);
29071 MIPS_INVAL("MSA instruction");
29072 generate_exception_end(ctx
, EXCP_RI
);
29077 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
29079 uint32_t opcode
= ctx
->opcode
;
29080 check_insn(ctx
, ASE_MSA
);
29081 check_msa_access(ctx
);
29083 switch (MASK_MSA_MINOR(opcode
)) {
29084 case OPC_MSA_I8_00
:
29085 case OPC_MSA_I8_01
:
29086 case OPC_MSA_I8_02
:
29087 gen_msa_i8(env
, ctx
);
29089 case OPC_MSA_I5_06
:
29090 case OPC_MSA_I5_07
:
29091 gen_msa_i5(env
, ctx
);
29093 case OPC_MSA_BIT_09
:
29094 case OPC_MSA_BIT_0A
:
29095 gen_msa_bit(env
, ctx
);
29097 case OPC_MSA_3R_0D
:
29098 case OPC_MSA_3R_0E
:
29099 case OPC_MSA_3R_0F
:
29100 case OPC_MSA_3R_10
:
29101 case OPC_MSA_3R_11
:
29102 case OPC_MSA_3R_12
:
29103 case OPC_MSA_3R_13
:
29104 case OPC_MSA_3R_14
:
29105 case OPC_MSA_3R_15
:
29106 gen_msa_3r(env
, ctx
);
29109 gen_msa_elm(env
, ctx
);
29111 case OPC_MSA_3RF_1A
:
29112 case OPC_MSA_3RF_1B
:
29113 case OPC_MSA_3RF_1C
:
29114 gen_msa_3rf(env
, ctx
);
29117 gen_msa_vec(env
, ctx
);
29128 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
29129 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
29130 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
29131 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
29133 TCGv_i32 twd
= tcg_const_i32(wd
);
29134 TCGv taddr
= tcg_temp_new();
29135 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
29137 switch (MASK_MSA_MINOR(opcode
)) {
29139 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
29142 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
29145 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
29148 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
29151 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
29154 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
29157 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
29160 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
29164 tcg_temp_free_i32(twd
);
29165 tcg_temp_free(taddr
);
29169 MIPS_INVAL("MSA instruction");
29170 generate_exception_end(ctx
, EXCP_RI
);
29176 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
29179 int rs
, rt
, rd
, sa
;
29183 /* make sure instructions are on a word boundary */
29184 if (ctx
->base
.pc_next
& 0x3) {
29185 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
29186 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
29190 /* Handle blikely not taken case */
29191 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
29192 TCGLabel
*l1
= gen_new_label();
29194 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
29195 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
29196 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
29200 op
= MASK_OP_MAJOR(ctx
->opcode
);
29201 rs
= (ctx
->opcode
>> 21) & 0x1f;
29202 rt
= (ctx
->opcode
>> 16) & 0x1f;
29203 rd
= (ctx
->opcode
>> 11) & 0x1f;
29204 sa
= (ctx
->opcode
>> 6) & 0x1f;
29205 imm
= (int16_t)ctx
->opcode
;
29208 decode_opc_special(env
, ctx
);
29211 #if defined(TARGET_MIPS64)
29212 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
29213 decode_mmi(env
, ctx
);
29215 if (ctx
->insn_flags
& ASE_MXU
) {
29216 decode_opc_mxu(env
, ctx
);
29219 decode_opc_special2_legacy(env
, ctx
);
29223 #if defined(TARGET_MIPS64)
29224 if (ctx
->insn_flags
& INSN_R5900
) {
29225 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
29227 decode_opc_special3(env
, ctx
);
29230 decode_opc_special3(env
, ctx
);
29234 op1
= MASK_REGIMM(ctx
->opcode
);
29236 case OPC_BLTZL
: /* REGIMM branches */
29240 check_insn(ctx
, ISA_MIPS2
);
29241 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29245 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
29249 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29251 /* OPC_NAL, OPC_BAL */
29252 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
29254 generate_exception_end(ctx
, EXCP_RI
);
29257 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
29260 case OPC_TGEI
: /* REGIMM traps */
29267 check_insn(ctx
, ISA_MIPS2
);
29268 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29269 gen_trap(ctx
, op1
, rs
, -1, imm
);
29272 check_insn(ctx
, ISA_MIPS32R6
);
29273 generate_exception_end(ctx
, EXCP_RI
);
29276 check_insn(ctx
, ISA_MIPS32R2
);
29278 * Break the TB to be able to sync copied instructions
29281 ctx
->base
.is_jmp
= DISAS_STOP
;
29283 case OPC_BPOSGE32
: /* MIPS DSP branch */
29284 #if defined(TARGET_MIPS64)
29288 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
29290 #if defined(TARGET_MIPS64)
29292 check_insn(ctx
, ISA_MIPS32R6
);
29293 check_mips_64(ctx
);
29295 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
29299 check_insn(ctx
, ISA_MIPS32R6
);
29300 check_mips_64(ctx
);
29302 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
29306 default: /* Invalid */
29307 MIPS_INVAL("regimm");
29308 generate_exception_end(ctx
, EXCP_RI
);
29313 check_cp0_enabled(ctx
);
29314 op1
= MASK_CP0(ctx
->opcode
);
29322 #if defined(TARGET_MIPS64)
29326 #ifndef CONFIG_USER_ONLY
29327 gen_cp0(env
, ctx
, op1
, rt
, rd
);
29328 #endif /* !CONFIG_USER_ONLY */
29346 #ifndef CONFIG_USER_ONLY
29347 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
29348 #endif /* !CONFIG_USER_ONLY */
29351 #ifndef CONFIG_USER_ONLY
29354 TCGv t0
= tcg_temp_new();
29356 op2
= MASK_MFMC0(ctx
->opcode
);
29360 gen_helper_dmt(t0
);
29361 gen_store_gpr(t0
, rt
);
29365 gen_helper_emt(t0
);
29366 gen_store_gpr(t0
, rt
);
29370 gen_helper_dvpe(t0
, cpu_env
);
29371 gen_store_gpr(t0
, rt
);
29375 gen_helper_evpe(t0
, cpu_env
);
29376 gen_store_gpr(t0
, rt
);
29379 check_insn(ctx
, ISA_MIPS32R6
);
29381 gen_helper_dvp(t0
, cpu_env
);
29382 gen_store_gpr(t0
, rt
);
29386 check_insn(ctx
, ISA_MIPS32R6
);
29388 gen_helper_evp(t0
, cpu_env
);
29389 gen_store_gpr(t0
, rt
);
29393 check_insn(ctx
, ISA_MIPS32R2
);
29394 save_cpu_state(ctx
, 1);
29395 gen_helper_di(t0
, cpu_env
);
29396 gen_store_gpr(t0
, rt
);
29398 * Stop translation as we may have switched
29399 * the execution mode.
29401 ctx
->base
.is_jmp
= DISAS_STOP
;
29404 check_insn(ctx
, ISA_MIPS32R2
);
29405 save_cpu_state(ctx
, 1);
29406 gen_helper_ei(t0
, cpu_env
);
29407 gen_store_gpr(t0
, rt
);
29409 * DISAS_STOP isn't sufficient, we need to ensure we break
29410 * out of translated code to check for pending interrupts.
29412 gen_save_pc(ctx
->base
.pc_next
+ 4);
29413 ctx
->base
.is_jmp
= DISAS_EXIT
;
29415 default: /* Invalid */
29416 MIPS_INVAL("mfmc0");
29417 generate_exception_end(ctx
, EXCP_RI
);
29422 #endif /* !CONFIG_USER_ONLY */
29425 check_insn(ctx
, ISA_MIPS32R2
);
29426 gen_load_srsgpr(rt
, rd
);
29429 check_insn(ctx
, ISA_MIPS32R2
);
29430 gen_store_srsgpr(rt
, rd
);
29434 generate_exception_end(ctx
, EXCP_RI
);
29438 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
29439 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29440 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
29441 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29444 /* Arithmetic with immediate opcode */
29445 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29449 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29451 case OPC_SLTI
: /* Set on less than with immediate opcode */
29453 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
29455 case OPC_ANDI
: /* Arithmetic with immediate opcode */
29456 case OPC_LUI
: /* OPC_AUI */
29459 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
29461 case OPC_J
: /* Jump */
29463 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29464 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29467 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
29468 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29470 generate_exception_end(ctx
, EXCP_RI
);
29473 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
29474 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29477 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29480 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
29481 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29483 generate_exception_end(ctx
, EXCP_RI
);
29486 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
29487 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29490 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29493 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
29496 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29498 check_insn(ctx
, ISA_MIPS32R6
);
29499 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
29500 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29503 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
29506 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29508 check_insn(ctx
, ISA_MIPS32R6
);
29509 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
29510 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29515 check_insn(ctx
, ISA_MIPS2
);
29516 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29520 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
29522 case OPC_LL
: /* Load and stores */
29523 check_insn(ctx
, ISA_MIPS2
);
29524 if (ctx
->insn_flags
& INSN_R5900
) {
29525 check_insn_opc_user_only(ctx
, INSN_R5900
);
29530 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29538 gen_ld(ctx
, op
, rt
, rs
, imm
);
29542 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29547 gen_st(ctx
, op
, rt
, rs
, imm
);
29550 check_insn(ctx
, ISA_MIPS2
);
29551 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29552 if (ctx
->insn_flags
& INSN_R5900
) {
29553 check_insn_opc_user_only(ctx
, INSN_R5900
);
29555 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
29558 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29559 check_cp0_enabled(ctx
);
29560 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
29561 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
29562 gen_cache_operation(ctx
, rt
, rs
, imm
);
29564 /* Treat as NOP. */
29567 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29568 if (ctx
->insn_flags
& INSN_R5900
) {
29569 /* Treat as NOP. */
29571 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
29572 /* Treat as NOP. */
29576 /* Floating point (COP1). */
29581 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
29585 op1
= MASK_CP1(ctx
->opcode
);
29590 check_cp1_enabled(ctx
);
29591 check_insn(ctx
, ISA_MIPS32R2
);
29597 check_cp1_enabled(ctx
);
29598 gen_cp1(ctx
, op1
, rt
, rd
);
29600 #if defined(TARGET_MIPS64)
29603 check_cp1_enabled(ctx
);
29604 check_insn(ctx
, ISA_MIPS3
);
29605 check_mips_64(ctx
);
29606 gen_cp1(ctx
, op1
, rt
, rd
);
29609 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
29610 check_cp1_enabled(ctx
);
29611 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29613 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29618 check_insn(ctx
, ASE_MIPS3D
);
29619 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29620 (rt
>> 2) & 0x7, imm
<< 2);
29624 check_cp1_enabled(ctx
);
29625 check_insn(ctx
, ISA_MIPS32R6
);
29626 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
29630 check_cp1_enabled(ctx
);
29631 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29633 check_insn(ctx
, ASE_MIPS3D
);
29636 check_cp1_enabled(ctx
);
29637 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29638 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
29639 (rt
>> 2) & 0x7, imm
<< 2);
29646 check_cp1_enabled(ctx
);
29647 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29653 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
29654 check_cp1_enabled(ctx
);
29655 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29657 case R6_OPC_CMP_AF_S
:
29658 case R6_OPC_CMP_UN_S
:
29659 case R6_OPC_CMP_EQ_S
:
29660 case R6_OPC_CMP_UEQ_S
:
29661 case R6_OPC_CMP_LT_S
:
29662 case R6_OPC_CMP_ULT_S
:
29663 case R6_OPC_CMP_LE_S
:
29664 case R6_OPC_CMP_ULE_S
:
29665 case R6_OPC_CMP_SAF_S
:
29666 case R6_OPC_CMP_SUN_S
:
29667 case R6_OPC_CMP_SEQ_S
:
29668 case R6_OPC_CMP_SEUQ_S
:
29669 case R6_OPC_CMP_SLT_S
:
29670 case R6_OPC_CMP_SULT_S
:
29671 case R6_OPC_CMP_SLE_S
:
29672 case R6_OPC_CMP_SULE_S
:
29673 case R6_OPC_CMP_OR_S
:
29674 case R6_OPC_CMP_UNE_S
:
29675 case R6_OPC_CMP_NE_S
:
29676 case R6_OPC_CMP_SOR_S
:
29677 case R6_OPC_CMP_SUNE_S
:
29678 case R6_OPC_CMP_SNE_S
:
29679 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29681 case R6_OPC_CMP_AF_D
:
29682 case R6_OPC_CMP_UN_D
:
29683 case R6_OPC_CMP_EQ_D
:
29684 case R6_OPC_CMP_UEQ_D
:
29685 case R6_OPC_CMP_LT_D
:
29686 case R6_OPC_CMP_ULT_D
:
29687 case R6_OPC_CMP_LE_D
:
29688 case R6_OPC_CMP_ULE_D
:
29689 case R6_OPC_CMP_SAF_D
:
29690 case R6_OPC_CMP_SUN_D
:
29691 case R6_OPC_CMP_SEQ_D
:
29692 case R6_OPC_CMP_SEUQ_D
:
29693 case R6_OPC_CMP_SLT_D
:
29694 case R6_OPC_CMP_SULT_D
:
29695 case R6_OPC_CMP_SLE_D
:
29696 case R6_OPC_CMP_SULE_D
:
29697 case R6_OPC_CMP_OR_D
:
29698 case R6_OPC_CMP_UNE_D
:
29699 case R6_OPC_CMP_NE_D
:
29700 case R6_OPC_CMP_SOR_D
:
29701 case R6_OPC_CMP_SUNE_D
:
29702 case R6_OPC_CMP_SNE_D
:
29703 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
29706 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
29707 rt
, rd
, sa
, (imm
>> 8) & 0x7);
29712 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
29727 check_insn(ctx
, ASE_MSA
);
29728 gen_msa_branch(env
, ctx
, op1
);
29732 generate_exception_end(ctx
, EXCP_RI
);
29737 /* Compact branches [R6] and COP2 [non-R6] */
29738 case OPC_BC
: /* OPC_LWC2 */
29739 case OPC_BALC
: /* OPC_SWC2 */
29740 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29741 /* OPC_BC, OPC_BALC */
29742 gen_compute_compact_branch(ctx
, op
, 0, 0,
29743 sextract32(ctx
->opcode
<< 2, 0, 28));
29745 /* OPC_LWC2, OPC_SWC2 */
29746 /* COP2: Not implemented. */
29747 generate_exception_err(ctx
, EXCP_CpU
, 2);
29750 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
29751 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
29752 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29754 /* OPC_BEQZC, OPC_BNEZC */
29755 gen_compute_compact_branch(ctx
, op
, rs
, 0,
29756 sextract32(ctx
->opcode
<< 2, 0, 23));
29758 /* OPC_JIC, OPC_JIALC */
29759 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
29762 /* OPC_LWC2, OPC_SWC2 */
29763 /* COP2: Not implemented. */
29764 generate_exception_err(ctx
, EXCP_CpU
, 2);
29768 check_insn(ctx
, INSN_LOONGSON2F
);
29769 /* Note that these instructions use different fields. */
29770 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
29774 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29775 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
29776 check_cp1_enabled(ctx
);
29777 op1
= MASK_CP3(ctx
->opcode
);
29781 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29787 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29788 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
29791 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29792 /* Treat as NOP. */
29795 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
29809 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
29810 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
29814 generate_exception_end(ctx
, EXCP_RI
);
29818 generate_exception_err(ctx
, EXCP_CpU
, 1);
29822 #if defined(TARGET_MIPS64)
29823 /* MIPS64 opcodes */
29825 if (ctx
->insn_flags
& INSN_R5900
) {
29826 check_insn_opc_user_only(ctx
, INSN_R5900
);
29831 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29835 check_insn(ctx
, ISA_MIPS3
);
29836 check_mips_64(ctx
);
29837 gen_ld(ctx
, op
, rt
, rs
, imm
);
29841 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29844 check_insn(ctx
, ISA_MIPS3
);
29845 check_mips_64(ctx
);
29846 gen_st(ctx
, op
, rt
, rs
, imm
);
29849 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
29850 check_insn(ctx
, ISA_MIPS3
);
29851 if (ctx
->insn_flags
& INSN_R5900
) {
29852 check_insn_opc_user_only(ctx
, INSN_R5900
);
29854 check_mips_64(ctx
);
29855 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEQ
, false);
29857 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
29858 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29859 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
29860 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29863 check_insn(ctx
, ISA_MIPS3
);
29864 check_mips_64(ctx
);
29865 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29869 check_insn(ctx
, ISA_MIPS3
);
29870 check_mips_64(ctx
);
29871 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
29874 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
29875 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29876 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
29878 MIPS_INVAL("major opcode");
29879 generate_exception_end(ctx
, EXCP_RI
);
29883 case OPC_DAUI
: /* OPC_JALX */
29884 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
29885 #if defined(TARGET_MIPS64)
29887 check_mips_64(ctx
);
29889 generate_exception(ctx
, EXCP_RI
);
29890 } else if (rt
!= 0) {
29891 TCGv t0
= tcg_temp_new();
29892 gen_load_gpr(t0
, rs
);
29893 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
29897 generate_exception_end(ctx
, EXCP_RI
);
29898 MIPS_INVAL("major opcode");
29902 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
29903 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
29904 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
29907 case OPC_MSA
: /* OPC_MDMX */
29908 if (ctx
->insn_flags
& INSN_R5900
) {
29909 #if defined(TARGET_MIPS64)
29910 gen_mmi_lq(env
, ctx
); /* MMI_OPC_LQ */
29913 /* MDMX: Not implemented. */
29918 check_insn(ctx
, ISA_MIPS32R6
);
29919 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
29921 default: /* Invalid */
29922 MIPS_INVAL("major opcode");
29923 generate_exception_end(ctx
, EXCP_RI
);
/*
 * TranslatorOps hook: seed the per-translation-block DisasContext from the
 * CPU state.  Everything cached here must stay constant for the lifetime of
 * the TB being translated, so the decoder can test plain struct fields
 * instead of re-reading env.
 */
static void mips_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUMIPSState *env = cs->env_ptr;

    /* First byte of the page the TB starts on; used to bound TB length. */
    ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
    ctx->saved_pc = -1;          /* no PC has been written back to env yet */
    ctx->insn_flags = env->insn_flags;
    ctx->CP0_Config1 = env->CP0_Config1;
    ctx->CP0_Config2 = env->CP0_Config2;
    ctx->CP0_Config3 = env->CP0_Config3;
    ctx->CP0_Config5 = env->CP0_Config5;
    /* Cache the CP0 Config feature bits the decoder consults most often. */
    ctx->kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx->rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx->ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx->bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx->bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx->PAMask = env->PAMask;
    ctx->mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx->eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
    ctx->sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
    ctx->CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx->cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context.  */
    ctx->hflags = (uint32_t)ctx->base.tb->flags; /* FIXME: maybe use 64 bits? */
    ctx->ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    /* Paired-single FP: advertised by FCR0.PS, or implied on Loongson 2E/2F. */
    ctx->ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
              (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx->vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx->mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx->nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx->abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, ctx);
#ifdef CONFIG_USER_ONLY
    ctx->mem_idx = MIPS_HFLAG_UM;    /* user-mode emulation: always UM */
#else
    ctx->mem_idx = hflags_mmu_index(ctx->hflags);
#endif
    /* R6 drops the alignment requirement on ordinary loads/stores. */
    ctx->default_tcg_memop_mask = (ctx->insn_flags & ISA_MIPS32R6) ?
                                  MO_UNALN : MO_ALIGN;

    LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx->base.tb, ctx->mem_idx,
              ctx->hflags);
}
/* TranslatorOps hook: nothing to emit at the start of a TB for MIPS. */
static void mips_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
{
}
/*
 * TranslatorOps hook: record the state needed to restart this instruction
 * (PC, delay-slot flags, branch target) so restore_state_to_opc() can
 * reconstruct it after an exception.
 */
static void mips_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    tcg_gen_insn_start(ctx->base.pc_next, ctx->hflags & MIPS_HFLAG_BMASK,
                       ctx->btarget);
}
/*
 * TranslatorOps hook: a guest breakpoint was hit at the current PC.
 * Synchronise CPU state and emit a debug exception, then stop translating.
 */
static bool mips_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
                                     const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    save_cpu_state(ctx, 1);
    ctx->base.is_jmp = DISAS_NORETURN;
    gen_helper_raise_exception_debug(cpu_env);
    /*
     * The address covered by the breakpoint must be included in
     * [tb->pc, tb->pc + tb->size) in order for it to be properly
     * cleared -- thus we increment the PC here so that the logic
     * setting tb->size below does the right thing.
     */
    ctx->base.pc_next += 4;
    return true;    /* breakpoint handled; end this TB */
}
/*
 * TranslatorOps hook: fetch and translate one guest instruction, choosing
 * the decoder by the active instruction set (nanoMIPS, classic 32-bit,
 * microMIPS, MIPS16), then handle delay/forbidden-slot bookkeeping and
 * decide whether the TB must end here.
 */
static void mips_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    CPUMIPSState *env = cs->env_ptr;
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    int insn_bytes;
    int is_slot;

    /* Remember whether this instruction started inside a branch slot. */
    is_slot = ctx->hflags & MIPS_HFLAG_BMASK;
    if (ctx->insn_flags & ISA_NANOMIPS32) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_nanomips_opc(env, ctx);
    } else if (!(ctx->hflags & MIPS_HFLAG_M16)) {
        /* Standard 32-bit MIPS encoding. */
        ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
        insn_bytes = 4;
        decode_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MICROMIPS) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_micromips_opc(env, ctx);
    } else if (ctx->insn_flags & ASE_MIPS16) {
        ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
        insn_bytes = decode_mips16_opc(env, ctx);
    } else {
        /* M16 mode requested but no 16-bit ASE present: reserved insn. */
        generate_exception_end(ctx, EXCP_RI);
        g_assert(ctx->base.is_jmp == DISAS_NORETURN);
        return;
    }

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        if (!(ctx->hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                             MIPS_HFLAG_FBNSLOT))) {
            /*
             * Force to generate branch as there is neither delay nor
             * forbidden slot.
             */
            is_slot = 1;
        }
        if ((ctx->hflags & MIPS_HFLAG_M16) &&
            (ctx->hflags & MIPS_HFLAG_FBNSLOT)) {
            /*
             * Force to generate branch as microMIPS R6 doesn't restrict
             * branches in the forbidden slot.
             */
            is_slot = 1;
        }
    }
    if (is_slot) {
        gen_branch(ctx, insn_bytes);
    }
    ctx->base.pc_next += insn_bytes;

    if (ctx->base.is_jmp != DISAS_NEXT) {
        return;
    }
    /*
     * Execute a branch and its delay slot as a single instruction.
     * This is what GDB expects and is consistent with what the
     * hardware does (e.g. if a delay slot instruction faults, the
     * reported PC is the PC of the branch).
     */
    if (ctx->base.singlestep_enabled &&
        (ctx->hflags & MIPS_HFLAG_BMASK) == 0) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
    /* Never let a TB cross a guest page boundary. */
    if (ctx->base.pc_next - ctx->page_start >= TARGET_PAGE_SIZE) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}
/*
 * TranslatorOps hook: emit the code that ends the translation block,
 * according to why translation stopped (is_jmp).
 */
static void mips_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    if (ctx->base.singlestep_enabled && ctx->base.is_jmp != DISAS_NORETURN) {
        /* Single-stepping: raise a debug exception instead of chaining TBs. */
        save_cpu_state(ctx, ctx->base.is_jmp != DISAS_EXIT);
        gen_helper_raise_exception_debug(cpu_env);
    } else {
        switch (ctx->base.is_jmp) {
        case DISAS_STOP:
            /* CPU state may have changed: re-look-up the next TB by hash. */
            gen_save_pc(ctx->base.pc_next);
            tcg_gen_lookup_and_goto_ptr();
            break;
        case DISAS_NEXT:
        case DISAS_TOO_MANY:
            /* Fall through to the next TB via a direct goto_tb link. */
            save_cpu_state(ctx, 0);
            gen_goto_tb(ctx, 0, ctx->base.pc_next);
            break;
        case DISAS_EXIT:
            /* Return to the main loop unconditionally. */
            tcg_gen_exit_tb(NULL, 0);
            break;
        case DISAS_NORETURN:
            /* An exception was already generated; nothing more to emit. */
            break;
        default:
            g_assert_not_reached();
        }
    }
}
/* TranslatorOps hook: log the guest disassembly of the translated TB. */
static void mips_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}
/* Callback table wiring the MIPS front end into the generic translator. */
static const TranslatorOps mips_tr_ops = {
    .init_disas_context = mips_tr_init_disas_context,
    .tb_start           = mips_tr_tb_start,
    .insn_start         = mips_tr_insn_start,
    .breakpoint_check   = mips_tr_breakpoint_check,
    .translate_insn     = mips_tr_translate_insn,
    .tb_stop            = mips_tr_tb_stop,
    .disas_log          = mips_tr_disas_log,
};
/*
 * Entry point called by the generic TCG code: translate one TB worth of
 * guest instructions using the MIPS TranslatorOps above.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext ctx;

    translator_loop(&mips_tr_ops, &ctx.base, cs, tb, max_insns);
}
/*
 * Dump the FPU register file and control registers to @f.
 * The layout depends on the FPU mode: with Status.FR=1 (F64) each of the
 * 32 FPRs is a full 64-bit register; otherwise even/odd pairs of 32-bit
 * registers combine to hold one double.
 */
static void fpu_dump_state(CPUMIPSState *env, FILE *f, int flags)
{
    int i;
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
    do {                                                                \
        if (is_fpu64) {                                                 \
            qemu_fprintf(f, "w:%08x d:%016" PRIx64                      \
                         " fd:%13g fs:%13g psu: %13g\n",                \
                         (fp)->w[FP_ENDIAN_IDX], (fp)->d,               \
                         (double)(fp)->fd,                              \
                         (double)(fp)->fs[FP_ENDIAN_IDX],               \
                         (double)(fp)->fs[!FP_ENDIAN_IDX]);             \
        } else {                                                        \
            fpr_t tmp;                                                  \
            /* 32-bit mode: merge the even/odd register pair. */        \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];              \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];       \
            qemu_fprintf(f, "w:%08x d:%016" PRIx64                      \
                         " fd:%13g fs:%13g psu:%13g\n",                 \
                         tmp.w[FP_ENDIAN_IDX], tmp.d,                   \
                         (double)tmp.fd,                                \
                         (double)tmp.fs[FP_ENDIAN_IDX],                 \
                         (double)tmp.fs[!FP_ENDIAN_IDX]);               \
        }                                                               \
    } while (0)

    qemu_fprintf(f,
                 "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                 get_float_exception_flags(&env->active_fpu.fp_status));
    /* Step by 2 in 32-bit mode, where registers pair up. */
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        qemu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#undef printfpr
}
/*
 * QEMUs CPUClass dump hook: print PC/HI/LO, the 32 GPRs (four per line),
 * the key CP0 registers, and -- when CPU_DUMP_FPU is requested and the FPU
 * is enabled -- the FPU state.
 */
void mips_cpu_dump_state(CPUState *cs, FILE *f, int flags)
{
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;
    int i;

    qemu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                 " LO=0x" TARGET_FMT_lx " ds %04x "
                 TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                 env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0) {
            qemu_fprintf(f, "GPR%02d:", i);
        }
        qemu_fprintf(f, " %s " TARGET_FMT_lx,
                     regnames[i], env->active_tc.gpr[i]);
        if ((i & 3) == 3) {
            qemu_fprintf(f, "\n");
        }
    }

    qemu_fprintf(f,
                 "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    qemu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                 PRIx64 "\n",
                 env->CP0_Config0, env->CP0_Config1, env->CP0_LLAddr);
    qemu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                 env->CP0_Config2, env->CP0_Config3);
    qemu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                 env->CP0_Config4, env->CP0_Config5);
    if ((flags & CPU_DUMP_FPU) && (env->hflags & MIPS_HFLAG_FPU)) {
        fpu_dump_state(env, f, flags);
    }
}
/*
 * One-time TCG initialisation: register every guest CPU register that the
 * translator accesses as a TCG global living inside CPUMIPSState, so
 * generated code can read/write them directly.
 */
void mips_tcg_init(void)
{
    int i;

    /* GPR 0 is hard-wired to zero and never materialised as a global. */
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState,
                                                 active_tc.gpr[i]),
                                        regnames[i]);

    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
        msa_wr_d[i * 2] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /*
         * The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers.
         */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
    }

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    /* One HI/LO pair per DSP accumulator. */
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
                                       regnames_HI[i]);
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
                                       regnames_LO[i]);
    }
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState,
                                              active_tc.DSPControl),
                                     "DSPControl");
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
                                      "fcr0");
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState,
                                                active_fpu.fcr31),
                                       "fcr31");
    cpu_lladdr = tcg_global_mem_new(cpu_env, offsetof(CPUMIPSState, lladdr),
                                    "lladdr");
    cpu_llval = tcg_global_mem_new(cpu_env, offsetof(CPUMIPSState, llval),
                                   "llval");

#if defined(TARGET_MIPS64)
    /* R5900 MMI 128-bit multimedia registers (upper halves of the GPRs). */
    for (i = 1; i < 32; i++) {
        cpu_mmr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUMIPSState,
                                                     active_tc.mmr[i]),
                                            regnames[i]);
    }
#endif

#if !defined(TARGET_MIPS64)
    /* MXU ASE registers: N-1 GPRs plus one control register. */
    for (i = 0; i < NUMBER_OF_MXU_REGISTERS - 1; i++) {
        mxu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState,
                                                 active_tc.mxu_gpr[i]),
                                        mxuregnames[i]);
    }
    mxu_CR = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.mxu_cr),
                                mxuregnames[NUMBER_OF_MXU_REGISTERS - 1]);
#endif
}
30279 #include "translate_init.inc.c"
/*
 * Realize-time setup for a MIPS CPU: establish the reset exception base
 * and initialise the MMU (system emulation only), FPU, and MVP state for
 * the configured CPU model.
 */
void cpu_mips_realize_env(CPUMIPSState *env)
{
    /* Architectural reset vector base (KSEG1 0xBFC00000). */
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, env->cpu_model);
#endif
    fpu_init(env, env->cpu_model);
    mvp_init(env, env->cpu_model);
}
30292 bool cpu_supports_cps_smp(const char *cpu_type
)
30294 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
30295 return (mcc
->cpu_def
->CP0_Config3
& (1 << CP0C3_CMGCR
)) != 0;
30298 bool cpu_supports_isa(const char *cpu_type
, uint64_t isa
)
30300 const MIPSCPUClass
*mcc
= MIPS_CPU_CLASS(object_class_by_name(cpu_type
));
30301 return (mcc
->cpu_def
->insn_flags
& isa
) != 0;
30304 void cpu_set_exception_base(int vp_index
, target_ulong address
)
30306 MIPSCPU
*vp
= MIPS_CPU(qemu_get_cpu(vp_index
));
30307 vp
->env
.exception_base
= address
;
30310 void cpu_state_reset(CPUMIPSState
*env
)
30312 CPUState
*cs
= env_cpu(env
);
30314 /* Reset registers to their default values */
30315 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
30316 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
30317 #ifdef TARGET_WORDS_BIGENDIAN
30318 env
->CP0_Config0
|= (1 << CP0C0_BE
);
30320 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
30321 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
30322 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
30323 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
30324 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
30325 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
30326 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
30327 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
30328 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
30329 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
30330 << env
->cpu_model
->CP0_LLAddr_shift
;
30331 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
30332 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
30333 env
->CCRes
= env
->cpu_model
->CCRes
;
30334 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
30335 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
30336 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
30337 env
->current_tc
= 0;
30338 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
30339 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
30340 #if defined(TARGET_MIPS64)
30341 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
30342 env
->SEGMask
|= 3ULL << 62;
30345 env
->PABITS
= env
->cpu_model
->PABITS
;
30346 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
30347 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
30348 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
30349 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
30350 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
30351 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
30352 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
30353 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
30354 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
30355 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
30356 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
30357 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
30358 env
->CP0_EBaseWG_rw_bitmask
= env
->cpu_model
->CP0_EBaseWG_rw_bitmask
;
30359 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
30360 env
->active_fpu
.fcr31_rw_bitmask
= env
->cpu_model
->CP1_fcr31_rw_bitmask
;
30361 env
->active_fpu
.fcr31
= env
->cpu_model
->CP1_fcr31
;
30362 env
->msair
= env
->cpu_model
->MSAIR
;
30363 env
->insn_flags
= env
->cpu_model
->insn_flags
;
30365 #if defined(CONFIG_USER_ONLY)
30366 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
30367 # ifdef TARGET_MIPS64
30368 /* Enable 64-bit register mode. */
30369 env
->CP0_Status
|= (1 << CP0St_PX
);
30371 # ifdef TARGET_ABI_MIPSN64
30372 /* Enable 64-bit address mode. */
30373 env
->CP0_Status
|= (1 << CP0St_UX
);
30376 * Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
30377 * hardware registers.
30379 env
->CP0_HWREna
|= 0x0000000F;
30380 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
30381 env
->CP0_Status
|= (1 << CP0St_CU1
);
30383 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
30384 env
->CP0_Status
|= (1 << CP0St_MX
);
30386 # if defined(TARGET_MIPS64)
30387 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
30388 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
30389 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
30390 env
->CP0_Status
|= (1 << CP0St_FR
);
30394 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
30396 * If the exception was raised from a delay slot,
30397 * come back to the jump.
30399 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
30400 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
30402 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
30404 env
->active_tc
.PC
= env
->exception_base
;
30405 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
30406 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
30407 env
->CP0_Wired
= 0;
30408 env
->CP0_GlobalNumber
= (cs
->cpu_index
& 0xFF) << CP0GN_VPId
;
30409 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
30410 if (mips_um_ksegs_enabled()) {
30411 env
->CP0_EBase
|= 0x40000000;
30413 env
->CP0_EBase
|= (int32_t)0x80000000;
30415 if (env
->CP0_Config3
& (1 << CP0C3_CMGCR
)) {
30416 env
->CP0_CMGCRBase
= 0x1fbf8000 >> 4;
30418 env
->CP0_EntryHi_ASID_mask
= (env
->CP0_Config4
& (1 << CP0C4_AE
)) ?
30420 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
30422 * Vectored interrupts not implemented, timer on int 7,
30423 * no performance counters.
30425 env
->CP0_IntCtl
= 0xe0000000;
30429 for (i
= 0; i
< 7; i
++) {
30430 env
->CP0_WatchLo
[i
] = 0;
30431 env
->CP0_WatchHi
[i
] = 0x80000000;
30433 env
->CP0_WatchLo
[7] = 0;
30434 env
->CP0_WatchHi
[7] = 0;
30436 /* Count register increments in debug mode, EJTAG version 1 */
30437 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
30439 cpu_mips_store_count(env
, 1);
30441 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
30444 /* Only TC0 on VPE 0 starts as active. */
30445 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
30446 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
30447 env
->tcs
[i
].CP0_TCHalt
= 1;
30449 env
->active_tc
.CP0_TCHalt
= 1;
30452 if (cs
->cpu_index
== 0) {
30453 /* VPE0 starts up enabled. */
30454 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
30455 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
30457 /* TC0 starts up unhalted. */
30459 env
->active_tc
.CP0_TCHalt
= 0;
30460 env
->tcs
[0].CP0_TCHalt
= 0;
30461 /* With thread 0 active. */
30462 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
30463 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
30468 * Configure default legacy segmentation control. We use this regardless of
30469 * whether segmentation control is presented to the guest.
30471 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
30472 env
->CP0_SegCtl0
= (CP0SC_AM_MK
<< CP0SC_AM
);
30473 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
30474 env
->CP0_SegCtl0
|= ((CP0SC_AM_MSK
<< CP0SC_AM
)) << 16;
30475 /* KSeg1 (seg2 0xA0000000..0x9FFFFFFF) */
30476 env
->CP0_SegCtl1
= (0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
30478 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
30479 env
->CP0_SegCtl1
|= ((0 << CP0SC_PA
) | (CP0SC_AM_UK
<< CP0SC_AM
) |
30480 (3 << CP0SC_C
)) << 16;
30481 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
30482 env
->CP0_SegCtl2
= (2 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30483 (1 << CP0SC_EU
) | (2 << CP0SC_C
);
30484 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
30485 env
->CP0_SegCtl2
|= ((0 << CP0SC_PA
) | (CP0SC_AM_MUSK
<< CP0SC_AM
) |
30486 (1 << CP0SC_EU
) | (2 << CP0SC_C
)) << 16;
30487 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
30488 env
->CP0_SegCtl1
|= (CP0SC_AM_UK
<< CP0SC1_XAM
);
30490 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
30491 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
30492 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
30493 env
->CP0_Status
|= (1 << CP0St_FR
);
30496 if (env
->insn_flags
& ISA_MIPS32R6
) {
30498 env
->CP0_PWSize
= 0x40;
30504 env
->CP0_PWField
= 0x0C30C302;
30511 env
->CP0_PWField
= 0x02;
30514 if (env
->CP0_Config3
& (1 << CP0C3_ISA
) & (1 << (CP0C3_ISA
+ 1))) {
30515 /* microMIPS on reset when Config3.ISA is 3 */
30516 env
->hflags
|= MIPS_HFLAG_M16
;
30520 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
30524 compute_hflags(env
);
30525 restore_fp_status(env
);
30526 restore_pamask(env
);
30527 cs
->exception_index
= EXCP_NONE
;
30529 if (semihosting_get_argc()) {
30530 /* UHI interface can be used to obtain argc and argv */
30531 env
->active_tc
.gpr
[4] = -1;
30535 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
30536 target_ulong
*data
)
30538 env
->active_tc
.PC
= data
[0];
30539 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
30540 env
->hflags
|= data
[1];
30541 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
30542 case MIPS_HFLAG_BR
:
30544 case MIPS_HFLAG_BC
:
30545 case MIPS_HFLAG_BL
:
30547 env
->btarget
= data
[2];