2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
25 #include "disas/disas.h"
27 #include "exec/cpu_ldst.h"
29 #include "exec/helper-proto.h"
30 #include "exec/helper-gen.h"
31 #include "sysemu/kvm.h"
32 #include "exec/semihost.h"
34 #include "trace-tcg.h"
36 #define MIPS_DEBUG_DISAS 0
38 /* MIPS major opcodes */
/* Extract the major opcode: the top 6 bits (31..26) of an instruction word.
   The parameter is parenthesized so expression arguments bind correctly. */
#define MASK_OP_MAJOR(op) ((op) & (0x3F << 26))
42 /* indirect opcode tables */
43 OPC_SPECIAL
= (0x00 << 26),
44 OPC_REGIMM
= (0x01 << 26),
45 OPC_CP0
= (0x10 << 26),
46 OPC_CP1
= (0x11 << 26),
47 OPC_CP2
= (0x12 << 26),
48 OPC_CP3
= (0x13 << 26),
49 OPC_SPECIAL2
= (0x1C << 26),
50 OPC_SPECIAL3
= (0x1F << 26),
51 /* arithmetic with immediate */
52 OPC_ADDI
= (0x08 << 26),
53 OPC_ADDIU
= (0x09 << 26),
54 OPC_SLTI
= (0x0A << 26),
55 OPC_SLTIU
= (0x0B << 26),
56 /* logic with immediate */
57 OPC_ANDI
= (0x0C << 26),
58 OPC_ORI
= (0x0D << 26),
59 OPC_XORI
= (0x0E << 26),
60 OPC_LUI
= (0x0F << 26),
61 /* arithmetic with immediate */
62 OPC_DADDI
= (0x18 << 26),
63 OPC_DADDIU
= (0x19 << 26),
64 /* Jump and branches */
66 OPC_JAL
= (0x03 << 26),
67 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
68 OPC_BEQL
= (0x14 << 26),
69 OPC_BNE
= (0x05 << 26),
70 OPC_BNEL
= (0x15 << 26),
71 OPC_BLEZ
= (0x06 << 26),
72 OPC_BLEZL
= (0x16 << 26),
73 OPC_BGTZ
= (0x07 << 26),
74 OPC_BGTZL
= (0x17 << 26),
75 OPC_JALX
= (0x1D << 26),
76 OPC_DAUI
= (0x1D << 26),
78 OPC_LDL
= (0x1A << 26),
79 OPC_LDR
= (0x1B << 26),
80 OPC_LB
= (0x20 << 26),
81 OPC_LH
= (0x21 << 26),
82 OPC_LWL
= (0x22 << 26),
83 OPC_LW
= (0x23 << 26),
84 OPC_LWPC
= OPC_LW
| 0x5,
85 OPC_LBU
= (0x24 << 26),
86 OPC_LHU
= (0x25 << 26),
87 OPC_LWR
= (0x26 << 26),
88 OPC_LWU
= (0x27 << 26),
89 OPC_SB
= (0x28 << 26),
90 OPC_SH
= (0x29 << 26),
91 OPC_SWL
= (0x2A << 26),
92 OPC_SW
= (0x2B << 26),
93 OPC_SDL
= (0x2C << 26),
94 OPC_SDR
= (0x2D << 26),
95 OPC_SWR
= (0x2E << 26),
96 OPC_LL
= (0x30 << 26),
97 OPC_LLD
= (0x34 << 26),
98 OPC_LD
= (0x37 << 26),
99 OPC_LDPC
= OPC_LD
| 0x5,
100 OPC_SC
= (0x38 << 26),
101 OPC_SCD
= (0x3C << 26),
102 OPC_SD
= (0x3F << 26),
103 /* Floating point load/store */
104 OPC_LWC1
= (0x31 << 26),
105 OPC_LWC2
= (0x32 << 26),
106 OPC_LDC1
= (0x35 << 26),
107 OPC_LDC2
= (0x36 << 26),
108 OPC_SWC1
= (0x39 << 26),
109 OPC_SWC2
= (0x3A << 26),
110 OPC_SDC1
= (0x3D << 26),
111 OPC_SDC2
= (0x3E << 26),
112 /* Compact Branches */
113 OPC_BLEZALC
= (0x06 << 26),
114 OPC_BGEZALC
= (0x06 << 26),
115 OPC_BGEUC
= (0x06 << 26),
116 OPC_BGTZALC
= (0x07 << 26),
117 OPC_BLTZALC
= (0x07 << 26),
118 OPC_BLTUC
= (0x07 << 26),
119 OPC_BOVC
= (0x08 << 26),
120 OPC_BEQZALC
= (0x08 << 26),
121 OPC_BEQC
= (0x08 << 26),
122 OPC_BLEZC
= (0x16 << 26),
123 OPC_BGEZC
= (0x16 << 26),
124 OPC_BGEC
= (0x16 << 26),
125 OPC_BGTZC
= (0x17 << 26),
126 OPC_BLTZC
= (0x17 << 26),
127 OPC_BLTC
= (0x17 << 26),
128 OPC_BNVC
= (0x18 << 26),
129 OPC_BNEZALC
= (0x18 << 26),
130 OPC_BNEC
= (0x18 << 26),
131 OPC_BC
= (0x32 << 26),
132 OPC_BEQZC
= (0x36 << 26),
133 OPC_JIC
= (0x36 << 26),
134 OPC_BALC
= (0x3A << 26),
135 OPC_BNEZC
= (0x3E << 26),
136 OPC_JIALC
= (0x3E << 26),
137 /* MDMX ASE specific */
138 OPC_MDMX
= (0x1E << 26),
139 /* MSA ASE, same as MDMX */
141 /* Cache and prefetch */
142 OPC_CACHE
= (0x2F << 26),
143 OPC_PREF
= (0x33 << 26),
144 /* PC-relative address computation / loads */
145 OPC_PCREL
= (0x3B << 26),
148 /* PC-relative address computation / loads */
149 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
150 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
152 /* Instructions determined by bits 19 and 20 */
153 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
154 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
155 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
157 /* Instructions determined by bits 16 ... 20 */
158 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
159 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
162 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
165 /* MIPS special opcodes */
/* MIPS SPECIAL opcodes: major opcode plus the 6-bit function field.
   Outer parentheses prevent precedence bugs: without them,
   MASK_SPECIAL(op) == x would parse as MASK_OP_MAJOR(op) | ((op & 0x3F) == x). */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
170 OPC_SLL
= 0x00 | OPC_SPECIAL
,
171 /* NOP is SLL r0, r0, 0 */
172 /* SSNOP is SLL r0, r0, 1 */
173 /* EHB is SLL r0, r0, 3 */
174 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
175 OPC_ROTR
= OPC_SRL
| (1 << 21),
176 OPC_SRA
= 0x03 | OPC_SPECIAL
,
177 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
178 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
179 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
180 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
181 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
182 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
183 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
184 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
185 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
186 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
187 OPC_DROTR
= OPC_DSRL
| (1 << 21),
188 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
189 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
190 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
191 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
192 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
193 /* Multiplication / division */
194 OPC_MULT
= 0x18 | OPC_SPECIAL
,
195 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
196 OPC_DIV
= 0x1A | OPC_SPECIAL
,
197 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
198 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
199 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
200 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
201 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
203 /* 2 registers arithmetic / logic */
204 OPC_ADD
= 0x20 | OPC_SPECIAL
,
205 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
206 OPC_SUB
= 0x22 | OPC_SPECIAL
,
207 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
208 OPC_AND
= 0x24 | OPC_SPECIAL
,
209 OPC_OR
= 0x25 | OPC_SPECIAL
,
210 OPC_XOR
= 0x26 | OPC_SPECIAL
,
211 OPC_NOR
= 0x27 | OPC_SPECIAL
,
212 OPC_SLT
= 0x2A | OPC_SPECIAL
,
213 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
214 OPC_DADD
= 0x2C | OPC_SPECIAL
,
215 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
216 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
217 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
219 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
220 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
222 OPC_TGE
= 0x30 | OPC_SPECIAL
,
223 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
224 OPC_TLT
= 0x32 | OPC_SPECIAL
,
225 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
226 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
227 OPC_TNE
= 0x36 | OPC_SPECIAL
,
228 /* HI / LO registers load & stores */
229 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
230 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
231 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
232 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
233 /* Conditional moves */
234 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
235 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
237 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
238 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
240 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
243 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
244 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
245 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
246 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
247 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
249 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
250 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
251 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
252 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
255 /* R6 Multiply and Divide instructions have the same Opcode
256 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
257 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
260 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
261 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
262 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
263 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
264 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
265 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
266 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
267 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
269 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
270 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
271 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
272 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
273 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
274 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
275 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
276 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
278 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
279 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
280 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
281 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
282 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
284 OPC_LSA
= 0x05 | OPC_SPECIAL
,
285 OPC_DLSA
= 0x15 | OPC_SPECIAL
,
288 /* Multiplication variants of the vr54xx. */
/* VR54xx multiply variants: SPECIAL function field plus bits 10..6.
   Expansion is fully parenthesized to avoid operator-precedence surprises. */
#define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | ((op) & (0x1F << 6)))
292 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
293 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
294 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
295 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
296 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
297 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
298 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
299 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
300 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
301 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
302 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
303 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
304 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
305 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
308 /* REGIMM (rt field) opcodes */
/* REGIMM opcodes: major opcode plus the rt field (bits 20..16).
   Expansion is fully parenthesized to avoid operator-precedence surprises. */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
312 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
313 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
314 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
315 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
316 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
317 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
318 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
319 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
320 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
321 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
322 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
323 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
324 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
325 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
326 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
328 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
329 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
332 /* Special2 opcodes */
/* SPECIAL2 opcodes: major opcode plus the 6-bit function field.
   Expansion is fully parenthesized to avoid operator-precedence surprises. */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
336 /* Multiply & xxx operations */
337 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
338 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
339 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
340 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
341 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
343 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
344 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
345 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
346 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
347 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
348 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
349 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
350 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
351 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
352 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
353 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
354 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
356 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
357 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
358 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
359 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
361 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
364 /* Special3 opcodes */
/* SPECIAL3 opcodes: major opcode plus the 6-bit function field.
   Expansion is fully parenthesized to avoid operator-precedence surprises. */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
368 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
369 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
370 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
371 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
372 OPC_INS
= 0x04 | OPC_SPECIAL3
,
373 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
374 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
375 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
376 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
377 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
378 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
379 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
380 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
383 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
384 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
385 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
386 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
387 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
388 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
389 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
390 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
391 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
392 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
393 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
394 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
397 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
398 /* MIPS DSP Arithmetic */
399 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
400 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
401 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
402 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
403 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
404 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
405 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
406 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
407 /* MIPS DSP GPR-Based Shift Sub-class */
408 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
409 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
410 /* MIPS DSP Multiply Sub-class insns */
411 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
412 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
413 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
414 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
415 /* DSP Bit/Manipulation Sub-class */
416 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
417 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
418 /* MIPS DSP Append Sub-class */
419 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
420 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
421 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
422 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
423 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
426 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
427 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
428 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
429 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
430 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
431 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
/* BSHFL sub-opcodes: SPECIAL3 function field plus the sa field (bits 10..6).
   Expansion is fully parenthesized to avoid operator-precedence surprises. */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
438 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
439 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
440 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
441 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp */
442 OPC_ALIGN_END
= (0x0B << 6) | OPC_BSHFL
, /* 010.00 to 010.11 */
443 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* DBSHFL sub-opcodes: SPECIAL3 function field plus the sa field (bits 10..6).
   Expansion is fully parenthesized to avoid operator-precedence surprises. */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
450 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
451 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
452 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp */
453 OPC_DALIGN_END
= (0x0F << 6) | OPC_DBSHFL
, /* 01.000 to 01.111 */
454 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
457 /* MIPS DSP REGIMM opcodes */
459 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
460 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
463 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
466 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
467 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
468 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
469 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
472 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
474 /* MIPS DSP Arithmetic Sub-class */
475 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
476 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
477 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
478 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
479 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
480 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
481 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
482 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
483 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
484 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
485 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
486 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
487 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
488 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
489 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
490 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
491 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
492 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
493 /* MIPS DSP Multiply Sub-class insns */
494 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
495 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
496 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
497 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
498 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
499 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
502 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
503 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
505 /* MIPS DSP Arithmetic Sub-class */
506 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
507 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
508 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
509 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
510 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
511 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
512 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
513 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
514 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
515 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
516 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
517 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
520 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
521 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
522 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
525 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
527 /* MIPS DSP Arithmetic Sub-class */
528 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
529 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
530 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
531 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
532 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
533 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
534 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
535 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
536 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
537 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
538 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
539 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
540 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
541 /* DSP Bit/Manipulation Sub-class */
542 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
543 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
544 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
545 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
546 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
549 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
551 /* MIPS DSP Arithmetic Sub-class */
552 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
553 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
554 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
555 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
556 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
557 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
558 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
559 /* DSP Compare-Pick Sub-class */
560 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
561 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
562 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
563 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
564 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
565 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
566 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
567 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
568 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
569 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
570 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
571 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
572 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
573 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
574 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
577 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
579 /* MIPS DSP GPR-Based Shift Sub-class */
580 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
581 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
582 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
583 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
584 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
585 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
586 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
587 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
588 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
589 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
590 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
591 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
592 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
593 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
594 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
595 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
596 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
597 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
598 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
599 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
600 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
601 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
604 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
606 /* MIPS DSP Multiply Sub-class insns */
607 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
608 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
609 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
610 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
611 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
612 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
613 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
614 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
615 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
616 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
617 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
618 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
619 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
620 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
621 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
622 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
623 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
624 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
625 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
626 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
627 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
628 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
631 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
633 /* DSP Bit/Manipulation Sub-class */
634 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
637 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
639 /* MIPS DSP Append Sub-class */
640 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
641 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
642 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
645 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
647 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
648 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
649 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
650 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
651 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
652 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
653 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
654 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
655 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
656 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
657 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
658 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
659 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
660 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
661 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
662 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
663 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
664 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
667 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
669 /* MIPS DSP Arithmetic Sub-class */
670 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
671 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
672 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
673 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
674 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
675 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
676 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
677 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
678 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
679 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
680 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
681 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
682 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
683 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
684 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
685 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
686 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
687 /* DSP Bit/Manipulation Sub-class */
688 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
689 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
690 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
691 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
692 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
693 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
696 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
698 /* MIPS DSP Multiply Sub-class insns */
699 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
700 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
701 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
702 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
703 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
704 /* MIPS DSP Arithmetic Sub-class */
705 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
706 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
707 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
708 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
709 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
710 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
711 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
712 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
713 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
714 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
715 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
716 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
717 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
718 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
719 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
720 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
721 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
722 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
723 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
724 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
725 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
728 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
730 /* DSP Compare-Pick Sub-class */
731 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
732 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
733 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
734 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
735 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
736 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
737 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
738 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
739 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
740 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
741 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
742 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
743 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
744 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
745 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
746 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
747 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
748 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
749 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
750 /* MIPS DSP Arithmetic Sub-class */
751 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
752 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
753 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
754 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
755 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
756 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
757 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
758 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
761 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
763 /* DSP Append Sub-class */
764 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
765 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
766 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
767 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
770 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
772 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
773 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
774 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
775 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
776 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
777 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
778 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
779 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
780 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
781 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
782 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
783 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
784 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
785 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
786 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
787 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
788 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
789 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
790 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
791 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
792 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
793 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
796 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
798 /* DSP Bit/Manipulation Sub-class */
799 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
802 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
804 /* MIPS DSP Multiply Sub-class insns */
805 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
806 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
807 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
808 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
809 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
810 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
811 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
812 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
813 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
814 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
815 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
816 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
817 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
818 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
819 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
820 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
821 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
822 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
823 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
824 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
825 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
826 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
827 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
828 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
829 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
830 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
833 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
835 /* MIPS DSP GPR-Based Shift Sub-class */
836 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
837 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
838 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
839 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
840 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
841 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
842 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
843 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
844 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
845 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
846 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
847 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
848 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
849 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
850 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
851 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
852 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
853 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
854 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
855 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
856 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
857 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
858 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
859 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
860 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
861 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
864 /* Coprocessor 0 (rs field) */
865 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
868 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
869 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
870 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
871 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
872 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
873 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
874 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
875 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
876 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
877 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
878 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
879 OPC_C0
= (0x10 << 21) | OPC_CP0
,
880 OPC_C0_FIRST
= (0x10 << 21) | OPC_CP0
,
881 OPC_C0_LAST
= (0x1F << 21) | OPC_CP0
,
885 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
888 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
889 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
890 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
891 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
892 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
893 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
896 /* Coprocessor 0 (with rs == C0) */
897 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
900 OPC_TLBR
= 0x01 | OPC_C0
,
901 OPC_TLBWI
= 0x02 | OPC_C0
,
902 OPC_TLBINV
= 0x03 | OPC_C0
,
903 OPC_TLBINVF
= 0x04 | OPC_C0
,
904 OPC_TLBWR
= 0x06 | OPC_C0
,
905 OPC_TLBP
= 0x08 | OPC_C0
,
906 OPC_RFE
= 0x10 | OPC_C0
,
907 OPC_ERET
= 0x18 | OPC_C0
,
908 OPC_DERET
= 0x1F | OPC_C0
,
909 OPC_WAIT
= 0x20 | OPC_C0
,
912 /* Coprocessor 1 (rs field) */
913 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
915 /* Values for the fmt field in FP instructions */
917 /* 0 - 15 are reserved */
918 FMT_S
= 16, /* single fp */
919 FMT_D
= 17, /* double fp */
920 FMT_E
= 18, /* extended fp */
921 FMT_Q
= 19, /* quad fp */
922 FMT_W
= 20, /* 32-bit fixed */
923 FMT_L
= 21, /* 64-bit fixed */
924 FMT_PS
= 22, /* paired single fp */
925 /* 23 - 31 are reserved */
929 OPC_MFC1
= (0x00 << 21) | OPC_CP1
,
930 OPC_DMFC1
= (0x01 << 21) | OPC_CP1
,
931 OPC_CFC1
= (0x02 << 21) | OPC_CP1
,
932 OPC_MFHC1
= (0x03 << 21) | OPC_CP1
,
933 OPC_MTC1
= (0x04 << 21) | OPC_CP1
,
934 OPC_DMTC1
= (0x05 << 21) | OPC_CP1
,
935 OPC_CTC1
= (0x06 << 21) | OPC_CP1
,
936 OPC_MTHC1
= (0x07 << 21) | OPC_CP1
,
937 OPC_BC1
= (0x08 << 21) | OPC_CP1
, /* bc */
938 OPC_BC1ANY2
= (0x09 << 21) | OPC_CP1
,
939 OPC_BC1ANY4
= (0x0A << 21) | OPC_CP1
,
940 OPC_BZ_V
= (0x0B << 21) | OPC_CP1
,
941 OPC_BNZ_V
= (0x0F << 21) | OPC_CP1
,
942 OPC_S_FMT
= (FMT_S
<< 21) | OPC_CP1
,
943 OPC_D_FMT
= (FMT_D
<< 21) | OPC_CP1
,
944 OPC_E_FMT
= (FMT_E
<< 21) | OPC_CP1
,
945 OPC_Q_FMT
= (FMT_Q
<< 21) | OPC_CP1
,
946 OPC_W_FMT
= (FMT_W
<< 21) | OPC_CP1
,
947 OPC_L_FMT
= (FMT_L
<< 21) | OPC_CP1
,
948 OPC_PS_FMT
= (FMT_PS
<< 21) | OPC_CP1
,
949 OPC_BC1EQZ
= (0x09 << 21) | OPC_CP1
,
950 OPC_BC1NEZ
= (0x0D << 21) | OPC_CP1
,
951 OPC_BZ_B
= (0x18 << 21) | OPC_CP1
,
952 OPC_BZ_H
= (0x19 << 21) | OPC_CP1
,
953 OPC_BZ_W
= (0x1A << 21) | OPC_CP1
,
954 OPC_BZ_D
= (0x1B << 21) | OPC_CP1
,
955 OPC_BNZ_B
= (0x1C << 21) | OPC_CP1
,
956 OPC_BNZ_H
= (0x1D << 21) | OPC_CP1
,
957 OPC_BNZ_W
= (0x1E << 21) | OPC_CP1
,
958 OPC_BNZ_D
= (0x1F << 21) | OPC_CP1
,
961 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
962 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
965 OPC_BC1F
= (0x00 << 16) | OPC_BC1
,
966 OPC_BC1T
= (0x01 << 16) | OPC_BC1
,
967 OPC_BC1FL
= (0x02 << 16) | OPC_BC1
,
968 OPC_BC1TL
= (0x03 << 16) | OPC_BC1
,
972 OPC_BC1FANY2
= (0x00 << 16) | OPC_BC1ANY2
,
973 OPC_BC1TANY2
= (0x01 << 16) | OPC_BC1ANY2
,
977 OPC_BC1FANY4
= (0x00 << 16) | OPC_BC1ANY4
,
978 OPC_BC1TANY4
= (0x01 << 16) | OPC_BC1ANY4
,
981 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
984 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
985 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
986 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
987 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
988 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
989 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
990 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
991 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
992 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
993 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
994 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
997 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1000 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1001 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1002 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1003 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1004 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1005 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1010 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1011 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1012 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1013 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1014 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1019 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1020 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1021 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1022 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1023 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1024 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1025 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1027 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1028 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1029 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1030 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1031 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1032 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1037 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1038 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1039 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1040 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1041 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1044 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1045 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1046 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1047 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1048 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1051 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1052 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1053 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1054 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1055 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1058 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1059 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1060 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1061 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1062 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1065 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1066 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1067 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1068 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1069 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1072 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1073 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1074 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1075 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1076 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1079 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1080 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1081 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1082 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1083 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1086 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1087 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1088 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1089 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1090 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1094 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1097 OPC_LWXC1
= 0x00 | OPC_CP3
,
1098 OPC_LDXC1
= 0x01 | OPC_CP3
,
1099 OPC_LUXC1
= 0x05 | OPC_CP3
,
1100 OPC_SWXC1
= 0x08 | OPC_CP3
,
1101 OPC_SDXC1
= 0x09 | OPC_CP3
,
1102 OPC_SUXC1
= 0x0D | OPC_CP3
,
1103 OPC_PREFX
= 0x0F | OPC_CP3
,
1104 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1105 OPC_MADD_S
= 0x20 | OPC_CP3
,
1106 OPC_MADD_D
= 0x21 | OPC_CP3
,
1107 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1108 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1109 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1110 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1111 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1112 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1113 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1114 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1115 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1116 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1120 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1122 OPC_MSA_I8_00
= 0x00 | OPC_MSA
,
1123 OPC_MSA_I8_01
= 0x01 | OPC_MSA
,
1124 OPC_MSA_I8_02
= 0x02 | OPC_MSA
,
1125 OPC_MSA_I5_06
= 0x06 | OPC_MSA
,
1126 OPC_MSA_I5_07
= 0x07 | OPC_MSA
,
1127 OPC_MSA_BIT_09
= 0x09 | OPC_MSA
,
1128 OPC_MSA_BIT_0A
= 0x0A | OPC_MSA
,
1129 OPC_MSA_3R_0D
= 0x0D | OPC_MSA
,
1130 OPC_MSA_3R_0E
= 0x0E | OPC_MSA
,
1131 OPC_MSA_3R_0F
= 0x0F | OPC_MSA
,
1132 OPC_MSA_3R_10
= 0x10 | OPC_MSA
,
1133 OPC_MSA_3R_11
= 0x11 | OPC_MSA
,
1134 OPC_MSA_3R_12
= 0x12 | OPC_MSA
,
1135 OPC_MSA_3R_13
= 0x13 | OPC_MSA
,
1136 OPC_MSA_3R_14
= 0x14 | OPC_MSA
,
1137 OPC_MSA_3R_15
= 0x15 | OPC_MSA
,
1138 OPC_MSA_ELM
= 0x19 | OPC_MSA
,
1139 OPC_MSA_3RF_1A
= 0x1A | OPC_MSA
,
1140 OPC_MSA_3RF_1B
= 0x1B | OPC_MSA
,
1141 OPC_MSA_3RF_1C
= 0x1C | OPC_MSA
,
1142 OPC_MSA_VEC
= 0x1E | OPC_MSA
,
1144 /* MI10 instruction */
1145 OPC_LD_B
= (0x20) | OPC_MSA
,
1146 OPC_LD_H
= (0x21) | OPC_MSA
,
1147 OPC_LD_W
= (0x22) | OPC_MSA
,
1148 OPC_LD_D
= (0x23) | OPC_MSA
,
1149 OPC_ST_B
= (0x24) | OPC_MSA
,
1150 OPC_ST_H
= (0x25) | OPC_MSA
,
1151 OPC_ST_W
= (0x26) | OPC_MSA
,
1152 OPC_ST_D
= (0x27) | OPC_MSA
,
1156 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1157 OPC_ADDVI_df
= (0x0 << 23) | OPC_MSA_I5_06
,
1158 OPC_CEQI_df
= (0x0 << 23) | OPC_MSA_I5_07
,
1159 OPC_SUBVI_df
= (0x1 << 23) | OPC_MSA_I5_06
,
1160 OPC_MAXI_S_df
= (0x2 << 23) | OPC_MSA_I5_06
,
1161 OPC_CLTI_S_df
= (0x2 << 23) | OPC_MSA_I5_07
,
1162 OPC_MAXI_U_df
= (0x3 << 23) | OPC_MSA_I5_06
,
1163 OPC_CLTI_U_df
= (0x3 << 23) | OPC_MSA_I5_07
,
1164 OPC_MINI_S_df
= (0x4 << 23) | OPC_MSA_I5_06
,
1165 OPC_CLEI_S_df
= (0x4 << 23) | OPC_MSA_I5_07
,
1166 OPC_MINI_U_df
= (0x5 << 23) | OPC_MSA_I5_06
,
1167 OPC_CLEI_U_df
= (0x5 << 23) | OPC_MSA_I5_07
,
1168 OPC_LDI_df
= (0x6 << 23) | OPC_MSA_I5_07
,
1170 /* I8 instruction */
1171 OPC_ANDI_B
= (0x0 << 24) | OPC_MSA_I8_00
,
1172 OPC_BMNZI_B
= (0x0 << 24) | OPC_MSA_I8_01
,
1173 OPC_SHF_B
= (0x0 << 24) | OPC_MSA_I8_02
,
1174 OPC_ORI_B
= (0x1 << 24) | OPC_MSA_I8_00
,
1175 OPC_BMZI_B
= (0x1 << 24) | OPC_MSA_I8_01
,
1176 OPC_SHF_H
= (0x1 << 24) | OPC_MSA_I8_02
,
1177 OPC_NORI_B
= (0x2 << 24) | OPC_MSA_I8_00
,
1178 OPC_BSELI_B
= (0x2 << 24) | OPC_MSA_I8_01
,
1179 OPC_SHF_W
= (0x2 << 24) | OPC_MSA_I8_02
,
1180 OPC_XORI_B
= (0x3 << 24) | OPC_MSA_I8_00
,
1182 /* VEC/2R/2RF instruction */
1183 OPC_AND_V
= (0x00 << 21) | OPC_MSA_VEC
,
1184 OPC_OR_V
= (0x01 << 21) | OPC_MSA_VEC
,
1185 OPC_NOR_V
= (0x02 << 21) | OPC_MSA_VEC
,
1186 OPC_XOR_V
= (0x03 << 21) | OPC_MSA_VEC
,
1187 OPC_BMNZ_V
= (0x04 << 21) | OPC_MSA_VEC
,
1188 OPC_BMZ_V
= (0x05 << 21) | OPC_MSA_VEC
,
1189 OPC_BSEL_V
= (0x06 << 21) | OPC_MSA_VEC
,
1191 OPC_MSA_2R
= (0x18 << 21) | OPC_MSA_VEC
,
1192 OPC_MSA_2RF
= (0x19 << 21) | OPC_MSA_VEC
,
1194 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1195 OPC_FILL_df
= (0x00 << 18) | OPC_MSA_2R
,
1196 OPC_PCNT_df
= (0x01 << 18) | OPC_MSA_2R
,
1197 OPC_NLOC_df
= (0x02 << 18) | OPC_MSA_2R
,
1198 OPC_NLZC_df
= (0x03 << 18) | OPC_MSA_2R
,
1200 /* 2RF instruction df(bit 16) = _w, _d */
1201 OPC_FCLASS_df
= (0x00 << 17) | OPC_MSA_2RF
,
1202 OPC_FTRUNC_S_df
= (0x01 << 17) | OPC_MSA_2RF
,
1203 OPC_FTRUNC_U_df
= (0x02 << 17) | OPC_MSA_2RF
,
1204 OPC_FSQRT_df
= (0x03 << 17) | OPC_MSA_2RF
,
1205 OPC_FRSQRT_df
= (0x04 << 17) | OPC_MSA_2RF
,
1206 OPC_FRCP_df
= (0x05 << 17) | OPC_MSA_2RF
,
1207 OPC_FRINT_df
= (0x06 << 17) | OPC_MSA_2RF
,
1208 OPC_FLOG2_df
= (0x07 << 17) | OPC_MSA_2RF
,
1209 OPC_FEXUPL_df
= (0x08 << 17) | OPC_MSA_2RF
,
1210 OPC_FEXUPR_df
= (0x09 << 17) | OPC_MSA_2RF
,
1211 OPC_FFQL_df
= (0x0A << 17) | OPC_MSA_2RF
,
1212 OPC_FFQR_df
= (0x0B << 17) | OPC_MSA_2RF
,
1213 OPC_FTINT_S_df
= (0x0C << 17) | OPC_MSA_2RF
,
1214 OPC_FTINT_U_df
= (0x0D << 17) | OPC_MSA_2RF
,
1215 OPC_FFINT_S_df
= (0x0E << 17) | OPC_MSA_2RF
,
1216 OPC_FFINT_U_df
= (0x0F << 17) | OPC_MSA_2RF
,
1218 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1219 OPC_SLL_df
= (0x0 << 23) | OPC_MSA_3R_0D
,
1220 OPC_ADDV_df
= (0x0 << 23) | OPC_MSA_3R_0E
,
1221 OPC_CEQ_df
= (0x0 << 23) | OPC_MSA_3R_0F
,
1222 OPC_ADD_A_df
= (0x0 << 23) | OPC_MSA_3R_10
,
1223 OPC_SUBS_S_df
= (0x0 << 23) | OPC_MSA_3R_11
,
1224 OPC_MULV_df
= (0x0 << 23) | OPC_MSA_3R_12
,
1225 OPC_DOTP_S_df
= (0x0 << 23) | OPC_MSA_3R_13
,
1226 OPC_SLD_df
= (0x0 << 23) | OPC_MSA_3R_14
,
1227 OPC_VSHF_df
= (0x0 << 23) | OPC_MSA_3R_15
,
1228 OPC_SRA_df
= (0x1 << 23) | OPC_MSA_3R_0D
,
1229 OPC_SUBV_df
= (0x1 << 23) | OPC_MSA_3R_0E
,
1230 OPC_ADDS_A_df
= (0x1 << 23) | OPC_MSA_3R_10
,
1231 OPC_SUBS_U_df
= (0x1 << 23) | OPC_MSA_3R_11
,
1232 OPC_MADDV_df
= (0x1 << 23) | OPC_MSA_3R_12
,
1233 OPC_DOTP_U_df
= (0x1 << 23) | OPC_MSA_3R_13
,
1234 OPC_SPLAT_df
= (0x1 << 23) | OPC_MSA_3R_14
,
1235 OPC_SRAR_df
= (0x1 << 23) | OPC_MSA_3R_15
,
1236 OPC_SRL_df
= (0x2 << 23) | OPC_MSA_3R_0D
,
1237 OPC_MAX_S_df
= (0x2 << 23) | OPC_MSA_3R_0E
,
1238 OPC_CLT_S_df
= (0x2 << 23) | OPC_MSA_3R_0F
,
1239 OPC_ADDS_S_df
= (0x2 << 23) | OPC_MSA_3R_10
,
1240 OPC_SUBSUS_U_df
= (0x2 << 23) | OPC_MSA_3R_11
,
1241 OPC_MSUBV_df
= (0x2 << 23) | OPC_MSA_3R_12
,
1242 OPC_DPADD_S_df
= (0x2 << 23) | OPC_MSA_3R_13
,
1243 OPC_PCKEV_df
= (0x2 << 23) | OPC_MSA_3R_14
,
1244 OPC_SRLR_df
= (0x2 << 23) | OPC_MSA_3R_15
,
1245 OPC_BCLR_df
= (0x3 << 23) | OPC_MSA_3R_0D
,
1246 OPC_MAX_U_df
= (0x3 << 23) | OPC_MSA_3R_0E
,
1247 OPC_CLT_U_df
= (0x3 << 23) | OPC_MSA_3R_0F
,
1248 OPC_ADDS_U_df
= (0x3 << 23) | OPC_MSA_3R_10
,
1249 OPC_SUBSUU_S_df
= (0x3 << 23) | OPC_MSA_3R_11
,
1250 OPC_DPADD_U_df
= (0x3 << 23) | OPC_MSA_3R_13
,
1251 OPC_PCKOD_df
= (0x3 << 23) | OPC_MSA_3R_14
,
1252 OPC_BSET_df
= (0x4 << 23) | OPC_MSA_3R_0D
,
1253 OPC_MIN_S_df
= (0x4 << 23) | OPC_MSA_3R_0E
,
1254 OPC_CLE_S_df
= (0x4 << 23) | OPC_MSA_3R_0F
,
1255 OPC_AVE_S_df
= (0x4 << 23) | OPC_MSA_3R_10
,
1256 OPC_ASUB_S_df
= (0x4 << 23) | OPC_MSA_3R_11
,
1257 OPC_DIV_S_df
= (0x4 << 23) | OPC_MSA_3R_12
,
1258 OPC_DPSUB_S_df
= (0x4 << 23) | OPC_MSA_3R_13
,
1259 OPC_ILVL_df
= (0x4 << 23) | OPC_MSA_3R_14
,
1260 OPC_HADD_S_df
= (0x4 << 23) | OPC_MSA_3R_15
,
1261 OPC_BNEG_df
= (0x5 << 23) | OPC_MSA_3R_0D
,
1262 OPC_MIN_U_df
= (0x5 << 23) | OPC_MSA_3R_0E
,
1263 OPC_CLE_U_df
= (0x5 << 23) | OPC_MSA_3R_0F
,
1264 OPC_AVE_U_df
= (0x5 << 23) | OPC_MSA_3R_10
,
1265 OPC_ASUB_U_df
= (0x5 << 23) | OPC_MSA_3R_11
,
1266 OPC_DIV_U_df
= (0x5 << 23) | OPC_MSA_3R_12
,
1267 OPC_DPSUB_U_df
= (0x5 << 23) | OPC_MSA_3R_13
,
1268 OPC_ILVR_df
= (0x5 << 23) | OPC_MSA_3R_14
,
1269 OPC_HADD_U_df
= (0x5 << 23) | OPC_MSA_3R_15
,
1270 OPC_BINSL_df
= (0x6 << 23) | OPC_MSA_3R_0D
,
1271 OPC_MAX_A_df
= (0x6 << 23) | OPC_MSA_3R_0E
,
1272 OPC_AVER_S_df
= (0x6 << 23) | OPC_MSA_3R_10
,
1273 OPC_MOD_S_df
= (0x6 << 23) | OPC_MSA_3R_12
,
1274 OPC_ILVEV_df
= (0x6 << 23) | OPC_MSA_3R_14
,
1275 OPC_HSUB_S_df
= (0x6 << 23) | OPC_MSA_3R_15
,
1276 OPC_BINSR_df
= (0x7 << 23) | OPC_MSA_3R_0D
,
1277 OPC_MIN_A_df
= (0x7 << 23) | OPC_MSA_3R_0E
,
1278 OPC_AVER_U_df
= (0x7 << 23) | OPC_MSA_3R_10
,
1279 OPC_MOD_U_df
= (0x7 << 23) | OPC_MSA_3R_12
,
1280 OPC_ILVOD_df
= (0x7 << 23) | OPC_MSA_3R_14
,
1281 OPC_HSUB_U_df
= (0x7 << 23) | OPC_MSA_3R_15
,
1283 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1284 OPC_SLDI_df
= (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1285 OPC_CTCMSA
= (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1286 OPC_SPLATI_df
= (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1287 OPC_CFCMSA
= (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1288 OPC_COPY_S_df
= (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1289 OPC_MOVE_V
= (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM
,
1290 OPC_COPY_U_df
= (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1291 OPC_INSERT_df
= (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1292 OPC_INSVE_df
= (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM
,
1294 /* 3RF instruction _df(bit 21) = _w, _d */
1295 OPC_FCAF_df
= (0x0 << 22) | OPC_MSA_3RF_1A
,
1296 OPC_FADD_df
= (0x0 << 22) | OPC_MSA_3RF_1B
,
1297 OPC_FCUN_df
= (0x1 << 22) | OPC_MSA_3RF_1A
,
1298 OPC_FSUB_df
= (0x1 << 22) | OPC_MSA_3RF_1B
,
1299 OPC_FCOR_df
= (0x1 << 22) | OPC_MSA_3RF_1C
,
1300 OPC_FCEQ_df
= (0x2 << 22) | OPC_MSA_3RF_1A
,
1301 OPC_FMUL_df
= (0x2 << 22) | OPC_MSA_3RF_1B
,
1302 OPC_FCUNE_df
= (0x2 << 22) | OPC_MSA_3RF_1C
,
1303 OPC_FCUEQ_df
= (0x3 << 22) | OPC_MSA_3RF_1A
,
1304 OPC_FDIV_df
= (0x3 << 22) | OPC_MSA_3RF_1B
,
1305 OPC_FCNE_df
= (0x3 << 22) | OPC_MSA_3RF_1C
,
1306 OPC_FCLT_df
= (0x4 << 22) | OPC_MSA_3RF_1A
,
1307 OPC_FMADD_df
= (0x4 << 22) | OPC_MSA_3RF_1B
,
1308 OPC_MUL_Q_df
= (0x4 << 22) | OPC_MSA_3RF_1C
,
1309 OPC_FCULT_df
= (0x5 << 22) | OPC_MSA_3RF_1A
,
1310 OPC_FMSUB_df
= (0x5 << 22) | OPC_MSA_3RF_1B
,
1311 OPC_MADD_Q_df
= (0x5 << 22) | OPC_MSA_3RF_1C
,
1312 OPC_FCLE_df
= (0x6 << 22) | OPC_MSA_3RF_1A
,
1313 OPC_MSUB_Q_df
= (0x6 << 22) | OPC_MSA_3RF_1C
,
1314 OPC_FCULE_df
= (0x7 << 22) | OPC_MSA_3RF_1A
,
1315 OPC_FEXP2_df
= (0x7 << 22) | OPC_MSA_3RF_1B
,
1316 OPC_FSAF_df
= (0x8 << 22) | OPC_MSA_3RF_1A
,
1317 OPC_FEXDO_df
= (0x8 << 22) | OPC_MSA_3RF_1B
,
1318 OPC_FSUN_df
= (0x9 << 22) | OPC_MSA_3RF_1A
,
1319 OPC_FSOR_df
= (0x9 << 22) | OPC_MSA_3RF_1C
,
1320 OPC_FSEQ_df
= (0xA << 22) | OPC_MSA_3RF_1A
,
1321 OPC_FTQ_df
= (0xA << 22) | OPC_MSA_3RF_1B
,
1322 OPC_FSUNE_df
= (0xA << 22) | OPC_MSA_3RF_1C
,
1323 OPC_FSUEQ_df
= (0xB << 22) | OPC_MSA_3RF_1A
,
1324 OPC_FSNE_df
= (0xB << 22) | OPC_MSA_3RF_1C
,
1325 OPC_FSLT_df
= (0xC << 22) | OPC_MSA_3RF_1A
,
1326 OPC_FMIN_df
= (0xC << 22) | OPC_MSA_3RF_1B
,
1327 OPC_MULR_Q_df
= (0xC << 22) | OPC_MSA_3RF_1C
,
1328 OPC_FSULT_df
= (0xD << 22) | OPC_MSA_3RF_1A
,
1329 OPC_FMIN_A_df
= (0xD << 22) | OPC_MSA_3RF_1B
,
1330 OPC_MADDR_Q_df
= (0xD << 22) | OPC_MSA_3RF_1C
,
1331 OPC_FSLE_df
= (0xE << 22) | OPC_MSA_3RF_1A
,
1332 OPC_FMAX_df
= (0xE << 22) | OPC_MSA_3RF_1B
,
1333 OPC_MSUBR_Q_df
= (0xE << 22) | OPC_MSA_3RF_1C
,
1334 OPC_FSULE_df
= (0xF << 22) | OPC_MSA_3RF_1A
,
1335 OPC_FMAX_A_df
= (0xF << 22) | OPC_MSA_3RF_1B
,
1337 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1338 OPC_SLLI_df
= (0x0 << 23) | OPC_MSA_BIT_09
,
1339 OPC_SAT_S_df
= (0x0 << 23) | OPC_MSA_BIT_0A
,
1340 OPC_SRAI_df
= (0x1 << 23) | OPC_MSA_BIT_09
,
1341 OPC_SAT_U_df
= (0x1 << 23) | OPC_MSA_BIT_0A
,
1342 OPC_SRLI_df
= (0x2 << 23) | OPC_MSA_BIT_09
,
1343 OPC_SRARI_df
= (0x2 << 23) | OPC_MSA_BIT_0A
,
1344 OPC_BCLRI_df
= (0x3 << 23) | OPC_MSA_BIT_09
,
1345 OPC_SRLRI_df
= (0x3 << 23) | OPC_MSA_BIT_0A
,
1346 OPC_BSETI_df
= (0x4 << 23) | OPC_MSA_BIT_09
,
1347 OPC_BNEGI_df
= (0x5 << 23) | OPC_MSA_BIT_09
,
1348 OPC_BINSLI_df
= (0x6 << 23) | OPC_MSA_BIT_09
,
1349 OPC_BINSRI_df
= (0x7 << 23) | OPC_MSA_BIT_09
,
1352 /* global register indices */
1353 static TCGv_ptr cpu_env
;
1354 static TCGv cpu_gpr
[32], cpu_PC
;
1355 static TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1356 static TCGv cpu_dspctrl
, btarget
, bcond
;
1357 static TCGv_i32 hflags
;
1358 static TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1359 static TCGv_i64 fpu_f64
[32];
1360 static TCGv_i64 msa_wr_d
[64];
1362 #include "exec/gen-icount.h"
/* Convenience wrappers that call a TCG helper with trailing constant i32
 * arguments.  Naming: <returns>e<TCG args>i<immediates>, e.g. 0e1i = no
 * return value, one TCG argument, one immediate. */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)
1406 typedef struct DisasContext
{
1407 struct TranslationBlock
*tb
;
1408 target_ulong pc
, saved_pc
;
1410 int singlestep_enabled
;
1412 int32_t CP0_Config1
;
1413 /* Routine used to access memory */
1415 TCGMemOp default_tcg_memop_mask
;
1416 uint32_t hflags
, saved_hflags
;
1418 target_ulong btarget
;
1427 int CP0_LLAddr_shift
;
1432 BS_NONE
= 0, /* We go out of the TB without reaching a branch or an
1433 * exception condition */
1434 BS_STOP
= 1, /* We want to stop translation for any reason */
1435 BS_BRANCH
= 2, /* We reached a branch condition */
1436 BS_EXCP
= 3, /* We reached an exception condition */
/* ABI names of the 32 general purpose registers, indexed by register
 * number; used when dumping CPU state. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};
/* Names of the HI accumulator registers (HI0 plus the DSP accumulators). */
static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};
/* Names of the LO accumulator registers (LO0 plus the DSP accumulators). */
static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};
/* Names of the 32 floating point registers, indexed by register number. */
static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};
/* Names for the 64-bit halves of the 32 MSA vector registers:
 * entry 2*i is w<i>.d0 and entry 2*i+1 is w<i>.d1. */
static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
/* Disassembly-time debug logging, compiled out unless MIPS_DEBUG_DISAS
 * is non-zero. */
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

/* Log an invalid/reserved encoding with its major and minor opcode fields;
 * expects a DisasContext named `ctx` in scope. */
#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
1497 /* General purpose registers moves. */
1498 static inline void gen_load_gpr (TCGv t
, int reg
)
1501 tcg_gen_movi_tl(t
, 0);
1503 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1506 static inline void gen_store_gpr (TCGv t
, int reg
)
1509 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
1512 /* Moves to/from shadow registers. */
1513 static inline void gen_load_srsgpr (int from
, int to
)
1515 TCGv t0
= tcg_temp_new();
1518 tcg_gen_movi_tl(t0
, 0);
1520 TCGv_i32 t2
= tcg_temp_new_i32();
1521 TCGv_ptr addr
= tcg_temp_new_ptr();
1523 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1524 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1525 tcg_gen_andi_i32(t2
, t2
, 0xf);
1526 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1527 tcg_gen_ext_i32_ptr(addr
, t2
);
1528 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1530 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1531 tcg_temp_free_ptr(addr
);
1532 tcg_temp_free_i32(t2
);
1534 gen_store_gpr(t0
, to
);
1538 static inline void gen_store_srsgpr (int from
, int to
)
1541 TCGv t0
= tcg_temp_new();
1542 TCGv_i32 t2
= tcg_temp_new_i32();
1543 TCGv_ptr addr
= tcg_temp_new_ptr();
1545 gen_load_gpr(t0
, from
);
1546 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1547 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1548 tcg_gen_andi_i32(t2
, t2
, 0xf);
1549 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1550 tcg_gen_ext_i32_ptr(addr
, t2
);
1551 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1553 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1554 tcg_temp_free_ptr(addr
);
1555 tcg_temp_free_i32(t2
);
1561 static inline void gen_save_pc(target_ulong pc
)
1563 tcg_gen_movi_tl(cpu_PC
, pc
);
1566 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1568 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1569 if (do_save_pc
&& ctx
->pc
!= ctx
->saved_pc
) {
1570 gen_save_pc(ctx
->pc
);
1571 ctx
->saved_pc
= ctx
->pc
;
1573 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1574 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1575 ctx
->saved_hflags
= ctx
->hflags
;
1576 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1582 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1588 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1590 ctx
->saved_hflags
= ctx
->hflags
;
1591 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1597 ctx
->btarget
= env
->btarget
;
1602 static inline void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1604 TCGv_i32 texcp
= tcg_const_i32(excp
);
1605 TCGv_i32 terr
= tcg_const_i32(err
);
1606 save_cpu_state(ctx
, 1);
1607 gen_helper_raise_exception_err(cpu_env
, texcp
, terr
);
1608 tcg_temp_free_i32(terr
);
1609 tcg_temp_free_i32(texcp
);
1610 ctx
->bstate
= BS_EXCP
;
1613 static inline void generate_exception(DisasContext
*ctx
, int excp
)
1615 gen_helper_0e0i(raise_exception
, excp
);
1618 static inline void generate_exception_end(DisasContext
*ctx
, int excp
)
1620 generate_exception_err(ctx
, excp
, 0);
1623 /* Floating point register moves. */
1624 static void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1626 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1627 generate_exception(ctx
, EXCP_RI
);
1629 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1632 static void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1635 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1636 generate_exception(ctx
, EXCP_RI
);
1638 t64
= tcg_temp_new_i64();
1639 tcg_gen_extu_i32_i64(t64
, t
);
1640 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1641 tcg_temp_free_i64(t64
);
1644 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1646 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1647 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1649 gen_load_fpr32(ctx
, t
, reg
| 1);
1653 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1655 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1656 TCGv_i64 t64
= tcg_temp_new_i64();
1657 tcg_gen_extu_i32_i64(t64
, t
);
1658 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1659 tcg_temp_free_i64(t64
);
1661 gen_store_fpr32(ctx
, t
, reg
| 1);
1665 static void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1667 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1668 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1670 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1674 static void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1676 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1677 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1680 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1681 t0
= tcg_temp_new_i64();
1682 tcg_gen_shri_i64(t0
, t
, 32);
1683 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1684 tcg_temp_free_i64(t0
);
/* Map an FP condition code number to its bit position in FCSR:
 * cc 0 is bit 23, cc 1..7 are bits 25..31 (bit 24 is skipped). */
static inline int get_fp_bit (int cc)
{
    return cc ? 24 + cc : 23;
}
1696 /* Addresses computation */
1697 static inline void gen_op_addr_add (DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1699 tcg_gen_add_tl(ret
, arg0
, arg1
);
1701 #if defined(TARGET_MIPS64)
1702 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1703 tcg_gen_ext32s_i64(ret
, ret
);
1708 /* Addresses computation (translation time) */
1709 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1712 target_long sum
= base
+ offset
;
1714 #if defined(TARGET_MIPS64)
1715 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1722 /* Sign-extract the low 32-bits to a target_long. */
1723 static inline void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1725 #if defined(TARGET_MIPS64)
1726 tcg_gen_ext32s_i64(ret
, arg
);
1728 tcg_gen_extrl_i64_i32(ret
, arg
);
1732 /* Sign-extract the high 32-bits to a target_long. */
1733 static inline void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1735 #if defined(TARGET_MIPS64)
1736 tcg_gen_sari_i64(ret
, arg
, 32);
1738 tcg_gen_extrh_i64_i32(ret
, arg
);
1742 static inline void check_cp0_enabled(DisasContext
*ctx
)
1744 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
)))
1745 generate_exception_err(ctx
, EXCP_CpU
, 0);
1748 static inline void check_cp1_enabled(DisasContext
*ctx
)
1750 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
)))
1751 generate_exception_err(ctx
, EXCP_CpU
, 1);
1754 /* Verify that the processor is running with COP1X instructions enabled.
1755 This is associated with the nabla symbol in the MIPS32 and MIPS64
1758 static inline void check_cop1x(DisasContext
*ctx
)
1760 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
)))
1761 generate_exception_end(ctx
, EXCP_RI
);
1764 /* Verify that the processor is running with 64-bit floating-point
1765 operations enabled. */
1767 static inline void check_cp1_64bitmode(DisasContext
*ctx
)
1769 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
)))
1770 generate_exception_end(ctx
, EXCP_RI
);
1774 * Verify if floating point register is valid; an operation is not defined
1775 * if bit 0 of any register specification is set and the FR bit in the
1776 * Status register equals zero, since the register numbers specify an
1777 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1778 * in the Status register equals one, both even and odd register numbers
1779 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1781 * Multiple 64 bit wide registers can be checked by calling
1782 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1784 static inline void check_cp1_registers(DisasContext
*ctx
, int regs
)
1786 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1)))
1787 generate_exception_end(ctx
, EXCP_RI
);
1790 /* Verify that the processor is running with DSP instructions enabled.
1791 This is enabled by CP0 Status register MX(24) bit.
1794 static inline void check_dsp(DisasContext
*ctx
)
1796 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1797 if (ctx
->insn_flags
& ASE_DSP
) {
1798 generate_exception_end(ctx
, EXCP_DSPDIS
);
1800 generate_exception_end(ctx
, EXCP_RI
);
1805 static inline void check_dspr2(DisasContext
*ctx
)
1807 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSPR2
))) {
1808 if (ctx
->insn_flags
& ASE_DSP
) {
1809 generate_exception_end(ctx
, EXCP_DSPDIS
);
1811 generate_exception_end(ctx
, EXCP_RI
);
1816 /* This code generates a "reserved instruction" exception if the
1817 CPU does not support the instruction set corresponding to flags. */
1818 static inline void check_insn(DisasContext
*ctx
, int flags
)
1820 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1821 generate_exception_end(ctx
, EXCP_RI
);
1825 /* This code generates a "reserved instruction" exception if the
1826 CPU has corresponding flag set which indicates that the instruction
1827 has been removed. */
1828 static inline void check_insn_opc_removed(DisasContext
*ctx
, int flags
)
1830 if (unlikely(ctx
->insn_flags
& flags
)) {
1831 generate_exception_end(ctx
, EXCP_RI
);
1835 /* This code generates a "reserved instruction" exception if the
1836 CPU does not support 64-bit paired-single (PS) floating point data type */
1837 static inline void check_ps(DisasContext
*ctx
)
1839 if (unlikely(!ctx
->ps
)) {
1840 generate_exception(ctx
, EXCP_RI
);
1842 check_cp1_64bitmode(ctx
);
1845 #ifdef TARGET_MIPS64
1846 /* This code generates a "reserved instruction" exception if 64-bit
1847 instructions are not enabled. */
1848 static inline void check_mips_64(DisasContext
*ctx
)
1850 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_64
)))
1851 generate_exception_end(ctx
, EXCP_RI
);
1855 #ifndef CONFIG_USER_ONLY
1856 static inline void check_mvh(DisasContext
*ctx
)
1858 if (unlikely(!ctx
->mvh
)) {
1859 generate_exception(ctx
, EXCP_RI
);
1864 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1865 calling interface for 32 and 64-bit FPRs. No sense in changing
1866 all callers for gen_load_fpr32 when we need the CTX parameter for
1868 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1869 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1870 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1871 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1872 int ft, int fs, int cc) \
1874 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
1875 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
1884 check_cp1_registers(ctx, fs | ft); \
1892 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
1893 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
1895 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
1896 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
1897 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
1898 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
1899 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
1900 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
1901 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
1902 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
1903 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
1904 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
1905 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
1906 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
1907 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
1908 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
1909 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
1910 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
1913 tcg_temp_free_i##bits (fp0); \
1914 tcg_temp_free_i##bits (fp1); \
1917 FOP_CONDS(, 0, d
, FMT_D
, 64)
1918 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1919 FOP_CONDS(, 0, s
, FMT_S
, 32)
1920 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1921 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1922 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1925 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1926 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
1927 int ft, int fs, int fd) \
1929 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1930 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1931 if (ifmt == FMT_D) { \
1932 check_cp1_registers(ctx, fs | ft | fd); \
1934 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1935 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1938 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1941 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1944 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1947 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1950 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1953 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1956 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1959 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1962 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1965 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1968 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1971 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1974 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1977 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1980 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1983 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1986 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1989 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1992 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
1995 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
1998 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2001 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2007 tcg_temp_free_i ## bits (fp0); \
2008 tcg_temp_free_i ## bits (fp1); \
2011 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2012 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2014 #undef gen_ldcmp_fpr32
2015 #undef gen_ldcmp_fpr64
2017 /* load/store instructions. */
2018 #ifdef CONFIG_USER_ONLY
2019 #define OP_LD_ATOMIC(insn,fname) \
2020 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2022 TCGv t0 = tcg_temp_new(); \
2023 tcg_gen_mov_tl(t0, arg1); \
2024 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2025 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2026 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2027 tcg_temp_free(t0); \
2030 #define OP_LD_ATOMIC(insn,fname) \
2031 static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
2033 gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx); \
2036 OP_LD_ATOMIC(ll
,ld32s
);
2037 #if defined(TARGET_MIPS64)
2038 OP_LD_ATOMIC(lld
,ld64
);
2042 #ifdef CONFIG_USER_ONLY
2043 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2044 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2046 TCGv t0 = tcg_temp_new(); \
2047 TCGLabel *l1 = gen_new_label(); \
2048 TCGLabel *l2 = gen_new_label(); \
2050 tcg_gen_andi_tl(t0, arg2, almask); \
2051 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2052 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2053 generate_exception(ctx, EXCP_AdES); \
2054 gen_set_label(l1); \
2055 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2056 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2057 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2058 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2059 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2060 generate_exception_end(ctx, EXCP_SC); \
2061 gen_set_label(l2); \
2062 tcg_gen_movi_tl(t0, 0); \
2063 gen_store_gpr(t0, rt); \
2064 tcg_temp_free(t0); \
2067 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2068 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
2070 TCGv t0 = tcg_temp_new(); \
2071 gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx); \
2072 gen_store_gpr(t0, rt); \
2073 tcg_temp_free(t0); \
2076 OP_ST_ATOMIC(sc
,st32
,ld32s
,0x3);
2077 #if defined(TARGET_MIPS64)
2078 OP_ST_ATOMIC(scd
,st64
,ld64
,0x7);
2082 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
2083 int base
, int16_t offset
)
2086 tcg_gen_movi_tl(addr
, offset
);
2087 } else if (offset
== 0) {
2088 gen_load_gpr(addr
, base
);
2090 tcg_gen_movi_tl(addr
, offset
);
2091 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2095 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
2097 target_ulong pc
= ctx
->pc
;
2099 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2100 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2105 pc
&= ~(target_ulong
)3;
2110 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2111 int rt
, int base
, int16_t offset
)
2115 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2116 /* Loongson CPU uses a load to zero register for prefetch.
2117 We emulate it as a NOP. On other CPU we must perform the
2118 actual memory access. */
2122 t0
= tcg_temp_new();
2123 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2126 #if defined(TARGET_MIPS64)
2128 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2129 ctx
->default_tcg_memop_mask
);
2130 gen_store_gpr(t0
, rt
);
2133 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2134 ctx
->default_tcg_memop_mask
);
2135 gen_store_gpr(t0
, rt
);
2139 op_ld_lld(t0
, t0
, ctx
);
2140 gen_store_gpr(t0
, rt
);
2143 t1
= tcg_temp_new();
2144 /* Do a byte access to possibly trigger a page
2145 fault with the unaligned address. */
2146 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2147 tcg_gen_andi_tl(t1
, t0
, 7);
2148 #ifndef TARGET_WORDS_BIGENDIAN
2149 tcg_gen_xori_tl(t1
, t1
, 7);
2151 tcg_gen_shli_tl(t1
, t1
, 3);
2152 tcg_gen_andi_tl(t0
, t0
, ~7);
2153 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2154 tcg_gen_shl_tl(t0
, t0
, t1
);
2155 t2
= tcg_const_tl(-1);
2156 tcg_gen_shl_tl(t2
, t2
, t1
);
2157 gen_load_gpr(t1
, rt
);
2158 tcg_gen_andc_tl(t1
, t1
, t2
);
2160 tcg_gen_or_tl(t0
, t0
, t1
);
2162 gen_store_gpr(t0
, rt
);
2165 t1
= tcg_temp_new();
2166 /* Do a byte access to possibly trigger a page
2167 fault with the unaligned address. */
2168 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2169 tcg_gen_andi_tl(t1
, t0
, 7);
2170 #ifdef TARGET_WORDS_BIGENDIAN
2171 tcg_gen_xori_tl(t1
, t1
, 7);
2173 tcg_gen_shli_tl(t1
, t1
, 3);
2174 tcg_gen_andi_tl(t0
, t0
, ~7);
2175 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2176 tcg_gen_shr_tl(t0
, t0
, t1
);
2177 tcg_gen_xori_tl(t1
, t1
, 63);
2178 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2179 tcg_gen_shl_tl(t2
, t2
, t1
);
2180 gen_load_gpr(t1
, rt
);
2181 tcg_gen_and_tl(t1
, t1
, t2
);
2183 tcg_gen_or_tl(t0
, t0
, t1
);
2185 gen_store_gpr(t0
, rt
);
2188 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2189 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2191 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2192 gen_store_gpr(t0
, rt
);
2196 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2197 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2199 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2200 gen_store_gpr(t0
, rt
);
2203 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2204 ctx
->default_tcg_memop_mask
);
2205 gen_store_gpr(t0
, rt
);
2208 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2209 ctx
->default_tcg_memop_mask
);
2210 gen_store_gpr(t0
, rt
);
2213 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2214 ctx
->default_tcg_memop_mask
);
2215 gen_store_gpr(t0
, rt
);
2218 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2219 gen_store_gpr(t0
, rt
);
2222 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2223 gen_store_gpr(t0
, rt
);
2226 t1
= tcg_temp_new();
2227 /* Do a byte access to possibly trigger a page
2228 fault with the unaligned address. */
2229 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2230 tcg_gen_andi_tl(t1
, t0
, 3);
2231 #ifndef TARGET_WORDS_BIGENDIAN
2232 tcg_gen_xori_tl(t1
, t1
, 3);
2234 tcg_gen_shli_tl(t1
, t1
, 3);
2235 tcg_gen_andi_tl(t0
, t0
, ~3);
2236 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2237 tcg_gen_shl_tl(t0
, t0
, t1
);
2238 t2
= tcg_const_tl(-1);
2239 tcg_gen_shl_tl(t2
, t2
, t1
);
2240 gen_load_gpr(t1
, rt
);
2241 tcg_gen_andc_tl(t1
, t1
, t2
);
2243 tcg_gen_or_tl(t0
, t0
, t1
);
2245 tcg_gen_ext32s_tl(t0
, t0
);
2246 gen_store_gpr(t0
, rt
);
2249 t1
= tcg_temp_new();
2250 /* Do a byte access to possibly trigger a page
2251 fault with the unaligned address. */
2252 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2253 tcg_gen_andi_tl(t1
, t0
, 3);
2254 #ifdef TARGET_WORDS_BIGENDIAN
2255 tcg_gen_xori_tl(t1
, t1
, 3);
2257 tcg_gen_shli_tl(t1
, t1
, 3);
2258 tcg_gen_andi_tl(t0
, t0
, ~3);
2259 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2260 tcg_gen_shr_tl(t0
, t0
, t1
);
2261 tcg_gen_xori_tl(t1
, t1
, 31);
2262 t2
= tcg_const_tl(0xfffffffeull
);
2263 tcg_gen_shl_tl(t2
, t2
, t1
);
2264 gen_load_gpr(t1
, rt
);
2265 tcg_gen_and_tl(t1
, t1
, t2
);
2267 tcg_gen_or_tl(t0
, t0
, t1
);
2269 tcg_gen_ext32s_tl(t0
, t0
);
2270 gen_store_gpr(t0
, rt
);
2274 op_ld_ll(t0
, t0
, ctx
);
2275 gen_store_gpr(t0
, rt
);
2282 static void gen_st (DisasContext
*ctx
, uint32_t opc
, int rt
,
2283 int base
, int16_t offset
)
2285 TCGv t0
= tcg_temp_new();
2286 TCGv t1
= tcg_temp_new();
2288 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2289 gen_load_gpr(t1
, rt
);
2291 #if defined(TARGET_MIPS64)
2293 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
2294 ctx
->default_tcg_memop_mask
);
2297 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
2300 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
2304 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
2305 ctx
->default_tcg_memop_mask
);
2308 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
2309 ctx
->default_tcg_memop_mask
);
2312 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_8
);
2315 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
2318 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
2326 /* Store conditional */
2327 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
2328 int base
, int16_t offset
)
2332 #ifdef CONFIG_USER_ONLY
2333 t0
= tcg_temp_local_new();
2334 t1
= tcg_temp_local_new();
2336 t0
= tcg_temp_new();
2337 t1
= tcg_temp_new();
2339 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2340 gen_load_gpr(t1
, rt
);
2342 #if defined(TARGET_MIPS64)
2345 op_st_scd(t1
, t0
, rt
, ctx
);
2350 op_st_sc(t1
, t0
, rt
, ctx
);
2357 /* Load and store */
2358 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2359 int base
, int16_t offset
)
2361 TCGv t0
= tcg_temp_new();
2363 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2364 /* Don't do NOP if destination is zero: we must perform the actual
2369 TCGv_i32 fp0
= tcg_temp_new_i32();
2370 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2371 ctx
->default_tcg_memop_mask
);
2372 gen_store_fpr32(ctx
, fp0
, ft
);
2373 tcg_temp_free_i32(fp0
);
2378 TCGv_i32 fp0
= tcg_temp_new_i32();
2379 gen_load_fpr32(ctx
, fp0
, ft
);
2380 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2381 ctx
->default_tcg_memop_mask
);
2382 tcg_temp_free_i32(fp0
);
2387 TCGv_i64 fp0
= tcg_temp_new_i64();
2388 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2389 ctx
->default_tcg_memop_mask
);
2390 gen_store_fpr64(ctx
, fp0
, ft
);
2391 tcg_temp_free_i64(fp0
);
2396 TCGv_i64 fp0
= tcg_temp_new_i64();
2397 gen_load_fpr64(ctx
, fp0
, ft
);
2398 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2399 ctx
->default_tcg_memop_mask
);
2400 tcg_temp_free_i64(fp0
);
2404 MIPS_INVAL("flt_ldst");
2405 generate_exception_end(ctx
, EXCP_RI
);
2412 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2413 int rs
, int16_t imm
)
2415 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2416 check_cp1_enabled(ctx
);
2420 check_insn(ctx
, ISA_MIPS2
);
2423 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
2426 generate_exception_err(ctx
, EXCP_CpU
, 1);
2430 /* Arithmetic with immediate operand */
2431 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2432 int rt
, int rs
, int16_t imm
)
2434 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2436 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2437 /* If no destination, treat it as a NOP.
2438 For addi, we must generate the overflow exception when needed. */
2444 TCGv t0
= tcg_temp_local_new();
2445 TCGv t1
= tcg_temp_new();
2446 TCGv t2
= tcg_temp_new();
2447 TCGLabel
*l1
= gen_new_label();
2449 gen_load_gpr(t1
, rs
);
2450 tcg_gen_addi_tl(t0
, t1
, uimm
);
2451 tcg_gen_ext32s_tl(t0
, t0
);
2453 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2454 tcg_gen_xori_tl(t2
, t0
, uimm
);
2455 tcg_gen_and_tl(t1
, t1
, t2
);
2457 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2459 /* operands of same sign, result different sign */
2460 generate_exception(ctx
, EXCP_OVERFLOW
);
2462 tcg_gen_ext32s_tl(t0
, t0
);
2463 gen_store_gpr(t0
, rt
);
2469 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2470 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2472 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2475 #if defined(TARGET_MIPS64)
2478 TCGv t0
= tcg_temp_local_new();
2479 TCGv t1
= tcg_temp_new();
2480 TCGv t2
= tcg_temp_new();
2481 TCGLabel
*l1
= gen_new_label();
2483 gen_load_gpr(t1
, rs
);
2484 tcg_gen_addi_tl(t0
, t1
, uimm
);
2486 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2487 tcg_gen_xori_tl(t2
, t0
, uimm
);
2488 tcg_gen_and_tl(t1
, t1
, t2
);
2490 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2492 /* operands of same sign, result different sign */
2493 generate_exception(ctx
, EXCP_OVERFLOW
);
2495 gen_store_gpr(t0
, rt
);
2501 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2503 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2510 /* Logic with immediate operand */
2511 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2512 int rt
, int rs
, int16_t imm
)
2517 /* If no destination, treat it as a NOP. */
2520 uimm
= (uint16_t)imm
;
2523 if (likely(rs
!= 0))
2524 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2526 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2530 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2532 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2535 if (likely(rs
!= 0))
2536 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2538 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2541 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2543 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2544 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2546 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2555 /* Set on less than with immediate operand */
2556 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2557 int rt
, int rs
, int16_t imm
)
2559 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2563 /* If no destination, treat it as a NOP. */
2566 t0
= tcg_temp_new();
2567 gen_load_gpr(t0
, rs
);
2570 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2573 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2579 /* Shifts with immediate operand */
2580 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2581 int rt
, int rs
, int16_t imm
)
2583 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2587 /* If no destination, treat it as a NOP. */
2591 t0
= tcg_temp_new();
2592 gen_load_gpr(t0
, rs
);
2595 tcg_gen_shli_tl(t0
, t0
, uimm
);
2596 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2599 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2603 tcg_gen_ext32u_tl(t0
, t0
);
2604 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2606 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2611 TCGv_i32 t1
= tcg_temp_new_i32();
2613 tcg_gen_trunc_tl_i32(t1
, t0
);
2614 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2615 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2616 tcg_temp_free_i32(t1
);
2618 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2621 #if defined(TARGET_MIPS64)
2623 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2626 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2629 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2633 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2635 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2639 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2642 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2645 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2648 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2656 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2657 int rd
, int rs
, int rt
)
2659 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2660 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2661 /* If no destination, treat it as a NOP.
2662 For add & sub, we must generate the overflow exception when needed. */
2669 TCGv t0
= tcg_temp_local_new();
2670 TCGv t1
= tcg_temp_new();
2671 TCGv t2
= tcg_temp_new();
2672 TCGLabel
*l1
= gen_new_label();
2674 gen_load_gpr(t1
, rs
);
2675 gen_load_gpr(t2
, rt
);
2676 tcg_gen_add_tl(t0
, t1
, t2
);
2677 tcg_gen_ext32s_tl(t0
, t0
);
2678 tcg_gen_xor_tl(t1
, t1
, t2
);
2679 tcg_gen_xor_tl(t2
, t0
, t2
);
2680 tcg_gen_andc_tl(t1
, t2
, t1
);
2682 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2684 /* operands of same sign, result different sign */
2685 generate_exception(ctx
, EXCP_OVERFLOW
);
2687 gen_store_gpr(t0
, rd
);
2692 if (rs
!= 0 && rt
!= 0) {
2693 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2694 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2695 } else if (rs
== 0 && rt
!= 0) {
2696 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2697 } else if (rs
!= 0 && rt
== 0) {
2698 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2700 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2705 TCGv t0
= tcg_temp_local_new();
2706 TCGv t1
= tcg_temp_new();
2707 TCGv t2
= tcg_temp_new();
2708 TCGLabel
*l1
= gen_new_label();
2710 gen_load_gpr(t1
, rs
);
2711 gen_load_gpr(t2
, rt
);
2712 tcg_gen_sub_tl(t0
, t1
, t2
);
2713 tcg_gen_ext32s_tl(t0
, t0
);
2714 tcg_gen_xor_tl(t2
, t1
, t2
);
2715 tcg_gen_xor_tl(t1
, t0
, t1
);
2716 tcg_gen_and_tl(t1
, t1
, t2
);
2718 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2720 /* operands of different sign, first operand and result different sign */
2721 generate_exception(ctx
, EXCP_OVERFLOW
);
2723 gen_store_gpr(t0
, rd
);
2728 if (rs
!= 0 && rt
!= 0) {
2729 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2730 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2731 } else if (rs
== 0 && rt
!= 0) {
2732 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2733 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2734 } else if (rs
!= 0 && rt
== 0) {
2735 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2737 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2740 #if defined(TARGET_MIPS64)
2743 TCGv t0
= tcg_temp_local_new();
2744 TCGv t1
= tcg_temp_new();
2745 TCGv t2
= tcg_temp_new();
2746 TCGLabel
*l1
= gen_new_label();
2748 gen_load_gpr(t1
, rs
);
2749 gen_load_gpr(t2
, rt
);
2750 tcg_gen_add_tl(t0
, t1
, t2
);
2751 tcg_gen_xor_tl(t1
, t1
, t2
);
2752 tcg_gen_xor_tl(t2
, t0
, t2
);
2753 tcg_gen_andc_tl(t1
, t2
, t1
);
2755 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2757 /* operands of same sign, result different sign */
2758 generate_exception(ctx
, EXCP_OVERFLOW
);
2760 gen_store_gpr(t0
, rd
);
2765 if (rs
!= 0 && rt
!= 0) {
2766 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2767 } else if (rs
== 0 && rt
!= 0) {
2768 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2769 } else if (rs
!= 0 && rt
== 0) {
2770 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2772 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2777 TCGv t0
= tcg_temp_local_new();
2778 TCGv t1
= tcg_temp_new();
2779 TCGv t2
= tcg_temp_new();
2780 TCGLabel
*l1
= gen_new_label();
2782 gen_load_gpr(t1
, rs
);
2783 gen_load_gpr(t2
, rt
);
2784 tcg_gen_sub_tl(t0
, t1
, t2
);
2785 tcg_gen_xor_tl(t2
, t1
, t2
);
2786 tcg_gen_xor_tl(t1
, t0
, t1
);
2787 tcg_gen_and_tl(t1
, t1
, t2
);
2789 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2791 /* operands of different sign, first operand and result different sign */
2792 generate_exception(ctx
, EXCP_OVERFLOW
);
2794 gen_store_gpr(t0
, rd
);
2799 if (rs
!= 0 && rt
!= 0) {
2800 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2801 } else if (rs
== 0 && rt
!= 0) {
2802 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2803 } else if (rs
!= 0 && rt
== 0) {
2804 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2806 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2811 if (likely(rs
!= 0 && rt
!= 0)) {
2812 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2813 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2815 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2821 /* Conditional move */
2822 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2823 int rd
, int rs
, int rt
)
2828 /* If no destination, treat it as a NOP. */
2832 t0
= tcg_temp_new();
2833 gen_load_gpr(t0
, rt
);
2834 t1
= tcg_const_tl(0);
2835 t2
= tcg_temp_new();
2836 gen_load_gpr(t2
, rs
);
2839 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2842 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2845 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2848 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2857 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2858 int rd
, int rs
, int rt
)
2861 /* If no destination, treat it as a NOP. */
2867 if (likely(rs
!= 0 && rt
!= 0)) {
2868 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2870 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2874 if (rs
!= 0 && rt
!= 0) {
2875 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2876 } else if (rs
== 0 && rt
!= 0) {
2877 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2878 } else if (rs
!= 0 && rt
== 0) {
2879 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2881 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2885 if (likely(rs
!= 0 && rt
!= 0)) {
2886 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2887 } else if (rs
== 0 && rt
!= 0) {
2888 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2889 } else if (rs
!= 0 && rt
== 0) {
2890 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2892 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2896 if (likely(rs
!= 0 && rt
!= 0)) {
2897 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2898 } else if (rs
== 0 && rt
!= 0) {
2899 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2900 } else if (rs
!= 0 && rt
== 0) {
2901 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2903 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2909 /* Set on lower than */
2910 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2911 int rd
, int rs
, int rt
)
2916 /* If no destination, treat it as a NOP. */
2920 t0
= tcg_temp_new();
2921 t1
= tcg_temp_new();
2922 gen_load_gpr(t0
, rs
);
2923 gen_load_gpr(t1
, rt
);
2926 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2929 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2937 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2938 int rd
, int rs
, int rt
)
2943 /* If no destination, treat it as a NOP.
2944 For add & sub, we must generate the overflow exception when needed. */
2948 t0
= tcg_temp_new();
2949 t1
= tcg_temp_new();
2950 gen_load_gpr(t0
, rs
);
2951 gen_load_gpr(t1
, rt
);
2954 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2955 tcg_gen_shl_tl(t0
, t1
, t0
);
2956 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2959 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2960 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2963 tcg_gen_ext32u_tl(t1
, t1
);
2964 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2965 tcg_gen_shr_tl(t0
, t1
, t0
);
2966 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2970 TCGv_i32 t2
= tcg_temp_new_i32();
2971 TCGv_i32 t3
= tcg_temp_new_i32();
2973 tcg_gen_trunc_tl_i32(t2
, t0
);
2974 tcg_gen_trunc_tl_i32(t3
, t1
);
2975 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2976 tcg_gen_rotr_i32(t2
, t3
, t2
);
2977 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2978 tcg_temp_free_i32(t2
);
2979 tcg_temp_free_i32(t3
);
2982 #if defined(TARGET_MIPS64)
2984 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2985 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2988 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2989 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2992 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2993 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2996 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2997 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3005 /* Arithmetic on HI/LO registers */
3006 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3008 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3019 #if defined(TARGET_MIPS64)
3021 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3025 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3029 #if defined(TARGET_MIPS64)
3031 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3035 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3040 #if defined(TARGET_MIPS64)
3042 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3046 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3049 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3054 #if defined(TARGET_MIPS64)
3056 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3060 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3063 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3069 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3072 TCGv t0
= tcg_const_tl(addr
);
3073 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3074 gen_store_gpr(t0
, reg
);
3078 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3084 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3087 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3088 addr
= addr_add(ctx
, pc
, offset
);
3089 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3093 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3094 addr
= addr_add(ctx
, pc
, offset
);
3095 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3097 #if defined(TARGET_MIPS64)
3100 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3101 addr
= addr_add(ctx
, pc
, offset
);
3102 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3106 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3109 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3110 addr
= addr_add(ctx
, pc
, offset
);
3111 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3116 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3117 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3118 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3121 #if defined(TARGET_MIPS64)
3122 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3123 case R6_OPC_LDPC
+ (1 << 16):
3124 case R6_OPC_LDPC
+ (2 << 16):
3125 case R6_OPC_LDPC
+ (3 << 16):
3127 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3128 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3129 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3133 MIPS_INVAL("OPC_PCREL");
3134 generate_exception_end(ctx
, EXCP_RI
);
3141 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3150 t0
= tcg_temp_new();
3151 t1
= tcg_temp_new();
3153 gen_load_gpr(t0
, rs
);
3154 gen_load_gpr(t1
, rt
);
3159 TCGv t2
= tcg_temp_new();
3160 TCGv t3
= tcg_temp_new();
3161 tcg_gen_ext32s_tl(t0
, t0
);
3162 tcg_gen_ext32s_tl(t1
, t1
);
3163 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3164 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3165 tcg_gen_and_tl(t2
, t2
, t3
);
3166 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3167 tcg_gen_or_tl(t2
, t2
, t3
);
3168 tcg_gen_movi_tl(t3
, 0);
3169 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3170 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3171 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3178 TCGv t2
= tcg_temp_new();
3179 TCGv t3
= tcg_temp_new();
3180 tcg_gen_ext32s_tl(t0
, t0
);
3181 tcg_gen_ext32s_tl(t1
, t1
);
3182 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3183 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3184 tcg_gen_and_tl(t2
, t2
, t3
);
3185 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3186 tcg_gen_or_tl(t2
, t2
, t3
);
3187 tcg_gen_movi_tl(t3
, 0);
3188 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3189 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3190 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3197 TCGv t2
= tcg_const_tl(0);
3198 TCGv t3
= tcg_const_tl(1);
3199 tcg_gen_ext32u_tl(t0
, t0
);
3200 tcg_gen_ext32u_tl(t1
, t1
);
3201 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3202 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3203 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3210 TCGv t2
= tcg_const_tl(0);
3211 TCGv t3
= tcg_const_tl(1);
3212 tcg_gen_ext32u_tl(t0
, t0
);
3213 tcg_gen_ext32u_tl(t1
, t1
);
3214 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3215 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3216 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3223 TCGv_i32 t2
= tcg_temp_new_i32();
3224 TCGv_i32 t3
= tcg_temp_new_i32();
3225 tcg_gen_trunc_tl_i32(t2
, t0
);
3226 tcg_gen_trunc_tl_i32(t3
, t1
);
3227 tcg_gen_mul_i32(t2
, t2
, t3
);
3228 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3229 tcg_temp_free_i32(t2
);
3230 tcg_temp_free_i32(t3
);
3235 TCGv_i32 t2
= tcg_temp_new_i32();
3236 TCGv_i32 t3
= tcg_temp_new_i32();
3237 tcg_gen_trunc_tl_i32(t2
, t0
);
3238 tcg_gen_trunc_tl_i32(t3
, t1
);
3239 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3240 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3241 tcg_temp_free_i32(t2
);
3242 tcg_temp_free_i32(t3
);
3247 TCGv_i32 t2
= tcg_temp_new_i32();
3248 TCGv_i32 t3
= tcg_temp_new_i32();
3249 tcg_gen_trunc_tl_i32(t2
, t0
);
3250 tcg_gen_trunc_tl_i32(t3
, t1
);
3251 tcg_gen_mul_i32(t2
, t2
, t3
);
3252 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3253 tcg_temp_free_i32(t2
);
3254 tcg_temp_free_i32(t3
);
3259 TCGv_i32 t2
= tcg_temp_new_i32();
3260 TCGv_i32 t3
= tcg_temp_new_i32();
3261 tcg_gen_trunc_tl_i32(t2
, t0
);
3262 tcg_gen_trunc_tl_i32(t3
, t1
);
3263 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3264 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3265 tcg_temp_free_i32(t2
);
3266 tcg_temp_free_i32(t3
);
3269 #if defined(TARGET_MIPS64)
3272 TCGv t2
= tcg_temp_new();
3273 TCGv t3
= tcg_temp_new();
3274 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3275 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3276 tcg_gen_and_tl(t2
, t2
, t3
);
3277 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3278 tcg_gen_or_tl(t2
, t2
, t3
);
3279 tcg_gen_movi_tl(t3
, 0);
3280 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3281 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3288 TCGv t2
= tcg_temp_new();
3289 TCGv t3
= tcg_temp_new();
3290 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3291 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3292 tcg_gen_and_tl(t2
, t2
, t3
);
3293 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3294 tcg_gen_or_tl(t2
, t2
, t3
);
3295 tcg_gen_movi_tl(t3
, 0);
3296 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3297 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3304 TCGv t2
= tcg_const_tl(0);
3305 TCGv t3
= tcg_const_tl(1);
3306 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3307 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3314 TCGv t2
= tcg_const_tl(0);
3315 TCGv t3
= tcg_const_tl(1);
3316 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3317 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3323 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3327 TCGv t2
= tcg_temp_new();
3328 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3333 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3337 TCGv t2
= tcg_temp_new();
3338 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3344 MIPS_INVAL("r6 mul/div");
3345 generate_exception_end(ctx
, EXCP_RI
);
3353 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3354 int acc
, int rs
, int rt
)
3358 t0
= tcg_temp_new();
3359 t1
= tcg_temp_new();
3361 gen_load_gpr(t0
, rs
);
3362 gen_load_gpr(t1
, rt
);
3371 TCGv t2
= tcg_temp_new();
3372 TCGv t3
= tcg_temp_new();
3373 tcg_gen_ext32s_tl(t0
, t0
);
3374 tcg_gen_ext32s_tl(t1
, t1
);
3375 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3376 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3377 tcg_gen_and_tl(t2
, t2
, t3
);
3378 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3379 tcg_gen_or_tl(t2
, t2
, t3
);
3380 tcg_gen_movi_tl(t3
, 0);
3381 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3382 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3383 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3384 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3385 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3392 TCGv t2
= tcg_const_tl(0);
3393 TCGv t3
= tcg_const_tl(1);
3394 tcg_gen_ext32u_tl(t0
, t0
);
3395 tcg_gen_ext32u_tl(t1
, t1
);
3396 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3397 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3398 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3399 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3400 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3407 TCGv_i32 t2
= tcg_temp_new_i32();
3408 TCGv_i32 t3
= tcg_temp_new_i32();
3409 tcg_gen_trunc_tl_i32(t2
, t0
);
3410 tcg_gen_trunc_tl_i32(t3
, t1
);
3411 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3412 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3413 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3414 tcg_temp_free_i32(t2
);
3415 tcg_temp_free_i32(t3
);
3420 TCGv_i32 t2
= tcg_temp_new_i32();
3421 TCGv_i32 t3
= tcg_temp_new_i32();
3422 tcg_gen_trunc_tl_i32(t2
, t0
);
3423 tcg_gen_trunc_tl_i32(t3
, t1
);
3424 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3425 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3426 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3427 tcg_temp_free_i32(t2
);
3428 tcg_temp_free_i32(t3
);
3431 #if defined(TARGET_MIPS64)
3434 TCGv t2
= tcg_temp_new();
3435 TCGv t3
= tcg_temp_new();
3436 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3437 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3438 tcg_gen_and_tl(t2
, t2
, t3
);
3439 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3440 tcg_gen_or_tl(t2
, t2
, t3
);
3441 tcg_gen_movi_tl(t3
, 0);
3442 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3443 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3444 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3451 TCGv t2
= tcg_const_tl(0);
3452 TCGv t3
= tcg_const_tl(1);
3453 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3454 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3455 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3461 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3464 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3469 TCGv_i64 t2
= tcg_temp_new_i64();
3470 TCGv_i64 t3
= tcg_temp_new_i64();
3472 tcg_gen_ext_tl_i64(t2
, t0
);
3473 tcg_gen_ext_tl_i64(t3
, t1
);
3474 tcg_gen_mul_i64(t2
, t2
, t3
);
3475 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3476 tcg_gen_add_i64(t2
, t2
, t3
);
3477 tcg_temp_free_i64(t3
);
3478 gen_move_low32(cpu_LO
[acc
], t2
);
3479 gen_move_high32(cpu_HI
[acc
], t2
);
3480 tcg_temp_free_i64(t2
);
3485 TCGv_i64 t2
= tcg_temp_new_i64();
3486 TCGv_i64 t3
= tcg_temp_new_i64();
3488 tcg_gen_ext32u_tl(t0
, t0
);
3489 tcg_gen_ext32u_tl(t1
, t1
);
3490 tcg_gen_extu_tl_i64(t2
, t0
);
3491 tcg_gen_extu_tl_i64(t3
, t1
);
3492 tcg_gen_mul_i64(t2
, t2
, t3
);
3493 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3494 tcg_gen_add_i64(t2
, t2
, t3
);
3495 tcg_temp_free_i64(t3
);
3496 gen_move_low32(cpu_LO
[acc
], t2
);
3497 gen_move_high32(cpu_HI
[acc
], t2
);
3498 tcg_temp_free_i64(t2
);
3503 TCGv_i64 t2
= tcg_temp_new_i64();
3504 TCGv_i64 t3
= tcg_temp_new_i64();
3506 tcg_gen_ext_tl_i64(t2
, t0
);
3507 tcg_gen_ext_tl_i64(t3
, t1
);
3508 tcg_gen_mul_i64(t2
, t2
, t3
);
3509 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3510 tcg_gen_sub_i64(t2
, t3
, t2
);
3511 tcg_temp_free_i64(t3
);
3512 gen_move_low32(cpu_LO
[acc
], t2
);
3513 gen_move_high32(cpu_HI
[acc
], t2
);
3514 tcg_temp_free_i64(t2
);
3519 TCGv_i64 t2
= tcg_temp_new_i64();
3520 TCGv_i64 t3
= tcg_temp_new_i64();
3522 tcg_gen_ext32u_tl(t0
, t0
);
3523 tcg_gen_ext32u_tl(t1
, t1
);
3524 tcg_gen_extu_tl_i64(t2
, t0
);
3525 tcg_gen_extu_tl_i64(t3
, t1
);
3526 tcg_gen_mul_i64(t2
, t2
, t3
);
3527 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3528 tcg_gen_sub_i64(t2
, t3
, t2
);
3529 tcg_temp_free_i64(t3
);
3530 gen_move_low32(cpu_LO
[acc
], t2
);
3531 gen_move_high32(cpu_HI
[acc
], t2
);
3532 tcg_temp_free_i64(t2
);
3536 MIPS_INVAL("mul/div");
3537 generate_exception_end(ctx
, EXCP_RI
);
3545 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3546 int rd
, int rs
, int rt
)
3548 TCGv t0
= tcg_temp_new();
3549 TCGv t1
= tcg_temp_new();
3551 gen_load_gpr(t0
, rs
);
3552 gen_load_gpr(t1
, rt
);
3555 case OPC_VR54XX_MULS
:
3556 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3558 case OPC_VR54XX_MULSU
:
3559 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3561 case OPC_VR54XX_MACC
:
3562 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3564 case OPC_VR54XX_MACCU
:
3565 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3567 case OPC_VR54XX_MSAC
:
3568 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3570 case OPC_VR54XX_MSACU
:
3571 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3573 case OPC_VR54XX_MULHI
:
3574 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3576 case OPC_VR54XX_MULHIU
:
3577 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3579 case OPC_VR54XX_MULSHI
:
3580 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3582 case OPC_VR54XX_MULSHIU
:
3583 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3585 case OPC_VR54XX_MACCHI
:
3586 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3588 case OPC_VR54XX_MACCHIU
:
3589 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3591 case OPC_VR54XX_MSACHI
:
3592 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3594 case OPC_VR54XX_MSACHIU
:
3595 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3598 MIPS_INVAL("mul vr54xx");
3599 generate_exception_end(ctx
, EXCP_RI
);
3602 gen_store_gpr(t0
, rd
);
3609 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
3618 t0
= tcg_temp_new();
3619 gen_load_gpr(t0
, rs
);
3623 gen_helper_clo(cpu_gpr
[rd
], t0
);
3627 gen_helper_clz(cpu_gpr
[rd
], t0
);
3629 #if defined(TARGET_MIPS64)
3632 gen_helper_dclo(cpu_gpr
[rd
], t0
);
3636 gen_helper_dclz(cpu_gpr
[rd
], t0
);
3643 /* Godson integer instructions */
3644 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3645 int rd
, int rs
, int rt
)
3657 case OPC_MULTU_G_2E
:
3658 case OPC_MULTU_G_2F
:
3659 #if defined(TARGET_MIPS64)
3660 case OPC_DMULT_G_2E
:
3661 case OPC_DMULT_G_2F
:
3662 case OPC_DMULTU_G_2E
:
3663 case OPC_DMULTU_G_2F
:
3665 t0
= tcg_temp_new();
3666 t1
= tcg_temp_new();
3669 t0
= tcg_temp_local_new();
3670 t1
= tcg_temp_local_new();
3674 gen_load_gpr(t0
, rs
);
3675 gen_load_gpr(t1
, rt
);
3680 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3681 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3683 case OPC_MULTU_G_2E
:
3684 case OPC_MULTU_G_2F
:
3685 tcg_gen_ext32u_tl(t0
, t0
);
3686 tcg_gen_ext32u_tl(t1
, t1
);
3687 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3688 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3693 TCGLabel
*l1
= gen_new_label();
3694 TCGLabel
*l2
= gen_new_label();
3695 TCGLabel
*l3
= gen_new_label();
3696 tcg_gen_ext32s_tl(t0
, t0
);
3697 tcg_gen_ext32s_tl(t1
, t1
);
3698 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3699 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3702 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3703 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3704 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3707 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3708 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3715 TCGLabel
*l1
= gen_new_label();
3716 TCGLabel
*l2
= gen_new_label();
3717 tcg_gen_ext32u_tl(t0
, t0
);
3718 tcg_gen_ext32u_tl(t1
, t1
);
3719 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3720 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3723 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3724 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3731 TCGLabel
*l1
= gen_new_label();
3732 TCGLabel
*l2
= gen_new_label();
3733 TCGLabel
*l3
= gen_new_label();
3734 tcg_gen_ext32u_tl(t0
, t0
);
3735 tcg_gen_ext32u_tl(t1
, t1
);
3736 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3737 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3738 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3740 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3743 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3744 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3751 TCGLabel
*l1
= gen_new_label();
3752 TCGLabel
*l2
= gen_new_label();
3753 tcg_gen_ext32u_tl(t0
, t0
);
3754 tcg_gen_ext32u_tl(t1
, t1
);
3755 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3756 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3759 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3760 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3764 #if defined(TARGET_MIPS64)
3765 case OPC_DMULT_G_2E
:
3766 case OPC_DMULT_G_2F
:
3767 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3769 case OPC_DMULTU_G_2E
:
3770 case OPC_DMULTU_G_2F
:
3771 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3776 TCGLabel
*l1
= gen_new_label();
3777 TCGLabel
*l2
= gen_new_label();
3778 TCGLabel
*l3
= gen_new_label();
3779 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3780 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3783 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3784 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3785 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3788 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3792 case OPC_DDIVU_G_2E
:
3793 case OPC_DDIVU_G_2F
:
3795 TCGLabel
*l1
= gen_new_label();
3796 TCGLabel
*l2
= gen_new_label();
3797 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3798 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3801 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3808 TCGLabel
*l1
= gen_new_label();
3809 TCGLabel
*l2
= gen_new_label();
3810 TCGLabel
*l3
= gen_new_label();
3811 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3812 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3813 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3815 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3818 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3822 case OPC_DMODU_G_2E
:
3823 case OPC_DMODU_G_2F
:
3825 TCGLabel
*l1
= gen_new_label();
3826 TCGLabel
*l2
= gen_new_label();
3827 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3828 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3831 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3842 /* Loongson multimedia instructions */
3843 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3845 uint32_t opc
, shift_max
;
3848 opc
= MASK_LMI(ctx
->opcode
);
3854 t0
= tcg_temp_local_new_i64();
3855 t1
= tcg_temp_local_new_i64();
3858 t0
= tcg_temp_new_i64();
3859 t1
= tcg_temp_new_i64();
3863 gen_load_fpr64(ctx
, t0
, rs
);
3864 gen_load_fpr64(ctx
, t1
, rt
);
3866 #define LMI_HELPER(UP, LO) \
3867 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3868 #define LMI_HELPER_1(UP, LO) \
3869 case OPC_##UP: gen_helper_##LO(t0, t0); break
3870 #define LMI_DIRECT(UP, LO, OP) \
3871 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
3874 LMI_HELPER(PADDSH
, paddsh
);
3875 LMI_HELPER(PADDUSH
, paddush
);
3876 LMI_HELPER(PADDH
, paddh
);
3877 LMI_HELPER(PADDW
, paddw
);
3878 LMI_HELPER(PADDSB
, paddsb
);
3879 LMI_HELPER(PADDUSB
, paddusb
);
3880 LMI_HELPER(PADDB
, paddb
);
3882 LMI_HELPER(PSUBSH
, psubsh
);
3883 LMI_HELPER(PSUBUSH
, psubush
);
3884 LMI_HELPER(PSUBH
, psubh
);
3885 LMI_HELPER(PSUBW
, psubw
);
3886 LMI_HELPER(PSUBSB
, psubsb
);
3887 LMI_HELPER(PSUBUSB
, psubusb
);
3888 LMI_HELPER(PSUBB
, psubb
);
3890 LMI_HELPER(PSHUFH
, pshufh
);
3891 LMI_HELPER(PACKSSWH
, packsswh
);
3892 LMI_HELPER(PACKSSHB
, packsshb
);
3893 LMI_HELPER(PACKUSHB
, packushb
);
3895 LMI_HELPER(PUNPCKLHW
, punpcklhw
);
3896 LMI_HELPER(PUNPCKHHW
, punpckhhw
);
3897 LMI_HELPER(PUNPCKLBH
, punpcklbh
);
3898 LMI_HELPER(PUNPCKHBH
, punpckhbh
);
3899 LMI_HELPER(PUNPCKLWD
, punpcklwd
);
3900 LMI_HELPER(PUNPCKHWD
, punpckhwd
);
3902 LMI_HELPER(PAVGH
, pavgh
);
3903 LMI_HELPER(PAVGB
, pavgb
);
3904 LMI_HELPER(PMAXSH
, pmaxsh
);
3905 LMI_HELPER(PMINSH
, pminsh
);
3906 LMI_HELPER(PMAXUB
, pmaxub
);
3907 LMI_HELPER(PMINUB
, pminub
);
3909 LMI_HELPER(PCMPEQW
, pcmpeqw
);
3910 LMI_HELPER(PCMPGTW
, pcmpgtw
);
3911 LMI_HELPER(PCMPEQH
, pcmpeqh
);
3912 LMI_HELPER(PCMPGTH
, pcmpgth
);
3913 LMI_HELPER(PCMPEQB
, pcmpeqb
);
3914 LMI_HELPER(PCMPGTB
, pcmpgtb
);
3916 LMI_HELPER(PSLLW
, psllw
);
3917 LMI_HELPER(PSLLH
, psllh
);
3918 LMI_HELPER(PSRLW
, psrlw
);
3919 LMI_HELPER(PSRLH
, psrlh
);
3920 LMI_HELPER(PSRAW
, psraw
);
3921 LMI_HELPER(PSRAH
, psrah
);
3923 LMI_HELPER(PMULLH
, pmullh
);
3924 LMI_HELPER(PMULHH
, pmulhh
);
3925 LMI_HELPER(PMULHUH
, pmulhuh
);
3926 LMI_HELPER(PMADDHW
, pmaddhw
);
3928 LMI_HELPER(PASUBUB
, pasubub
);
3929 LMI_HELPER_1(BIADD
, biadd
);
3930 LMI_HELPER_1(PMOVMSKB
, pmovmskb
);
3932 LMI_DIRECT(PADDD
, paddd
, add
);
3933 LMI_DIRECT(PSUBD
, psubd
, sub
);
3934 LMI_DIRECT(XOR_CP2
, xor, xor);
3935 LMI_DIRECT(NOR_CP2
, nor
, nor
);
3936 LMI_DIRECT(AND_CP2
, and, and);
3937 LMI_DIRECT(PANDN
, pandn
, andc
);
3938 LMI_DIRECT(OR
, or, or);
3941 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
3944 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
3947 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
3950 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
3954 tcg_gen_andi_i64(t1
, t1
, 3);
3955 tcg_gen_shli_i64(t1
, t1
, 4);
3956 tcg_gen_shr_i64(t0
, t0
, t1
);
3957 tcg_gen_ext16u_i64(t0
, t0
);
3961 tcg_gen_add_i64(t0
, t0
, t1
);
3962 tcg_gen_ext32s_i64(t0
, t0
);
3965 tcg_gen_sub_i64(t0
, t0
, t1
);
3966 tcg_gen_ext32s_i64(t0
, t0
);
3988 /* Make sure shift count isn't TCG undefined behaviour. */
3989 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
3994 tcg_gen_shl_i64(t0
, t0
, t1
);
3998 /* Since SRA is UndefinedResult without sign-extended inputs,
3999 we can treat SRA and DSRA the same. */
4000 tcg_gen_sar_i64(t0
, t0
, t1
);
4003 /* We want to shift in zeros for SRL; zero-extend first. */
4004 tcg_gen_ext32u_i64(t0
, t0
);
4007 tcg_gen_shr_i64(t0
, t0
, t1
);
4011 if (shift_max
== 32) {
4012 tcg_gen_ext32s_i64(t0
, t0
);
4015 /* Shifts larger than MAX produce zero. */
4016 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4017 tcg_gen_neg_i64(t1
, t1
);
4018 tcg_gen_and_i64(t0
, t0
, t1
);
4024 TCGv_i64 t2
= tcg_temp_new_i64();
4025 TCGLabel
*lab
= gen_new_label();
4027 tcg_gen_mov_i64(t2
, t0
);
4028 tcg_gen_add_i64(t0
, t1
, t2
);
4029 if (opc
== OPC_ADD_CP2
) {
4030 tcg_gen_ext32s_i64(t0
, t0
);
4032 tcg_gen_xor_i64(t1
, t1
, t2
);
4033 tcg_gen_xor_i64(t2
, t2
, t0
);
4034 tcg_gen_andc_i64(t1
, t2
, t1
);
4035 tcg_temp_free_i64(t2
);
4036 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4037 generate_exception(ctx
, EXCP_OVERFLOW
);
4045 TCGv_i64 t2
= tcg_temp_new_i64();
4046 TCGLabel
*lab
= gen_new_label();
4048 tcg_gen_mov_i64(t2
, t0
);
4049 tcg_gen_sub_i64(t0
, t1
, t2
);
4050 if (opc
== OPC_SUB_CP2
) {
4051 tcg_gen_ext32s_i64(t0
, t0
);
4053 tcg_gen_xor_i64(t1
, t1
, t2
);
4054 tcg_gen_xor_i64(t2
, t2
, t0
);
4055 tcg_gen_and_i64(t1
, t1
, t2
);
4056 tcg_temp_free_i64(t2
);
4057 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4058 generate_exception(ctx
, EXCP_OVERFLOW
);
4064 tcg_gen_ext32u_i64(t0
, t0
);
4065 tcg_gen_ext32u_i64(t1
, t1
);
4066 tcg_gen_mul_i64(t0
, t0
, t1
);
4075 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4076 FD field is the CC field? */
4078 MIPS_INVAL("loongson_cp2");
4079 generate_exception_end(ctx
, EXCP_RI
);
4086 gen_store_fpr64(ctx
, t0
, rd
);
4088 tcg_temp_free_i64(t0
);
4089 tcg_temp_free_i64(t1
);
4093 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
4094 int rs
, int rt
, int16_t imm
)
4097 TCGv t0
= tcg_temp_new();
4098 TCGv t1
= tcg_temp_new();
4101 /* Load needed operands */
4109 /* Compare two registers */
4111 gen_load_gpr(t0
, rs
);
4112 gen_load_gpr(t1
, rt
);
4122 /* Compare register to immediate */
4123 if (rs
!= 0 || imm
!= 0) {
4124 gen_load_gpr(t0
, rs
);
4125 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4132 case OPC_TEQ
: /* rs == rs */
4133 case OPC_TEQI
: /* r0 == 0 */
4134 case OPC_TGE
: /* rs >= rs */
4135 case OPC_TGEI
: /* r0 >= 0 */
4136 case OPC_TGEU
: /* rs >= rs unsigned */
4137 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4139 generate_exception_end(ctx
, EXCP_TRAP
);
4141 case OPC_TLT
: /* rs < rs */
4142 case OPC_TLTI
: /* r0 < 0 */
4143 case OPC_TLTU
: /* rs < rs unsigned */
4144 case OPC_TLTIU
: /* r0 < 0 unsigned */
4145 case OPC_TNE
: /* rs != rs */
4146 case OPC_TNEI
: /* r0 != 0 */
4147 /* Never trap: treat as NOP. */
4151 TCGLabel
*l1
= gen_new_label();
4156 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4160 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4164 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4168 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4172 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4176 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4179 generate_exception(ctx
, EXCP_TRAP
);
4186 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4188 TranslationBlock
*tb
;
4190 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
4191 likely(!ctx
->singlestep_enabled
)) {
4194 tcg_gen_exit_tb((uintptr_t)tb
+ n
);
4197 if (ctx
->singlestep_enabled
) {
4198 save_cpu_state(ctx
, 0);
4199 gen_helper_raise_exception_debug(cpu_env
);
4205 /* Branches (before delay slot) */
4206 static void gen_compute_branch (DisasContext
*ctx
, uint32_t opc
,
4208 int rs
, int rt
, int32_t offset
,
4211 target_ulong btgt
= -1;
4213 int bcond_compute
= 0;
4214 TCGv t0
= tcg_temp_new();
4215 TCGv t1
= tcg_temp_new();
4217 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4218 #ifdef MIPS_DEBUG_DISAS
4219 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4220 TARGET_FMT_lx
"\n", ctx
->pc
);
4222 generate_exception_end(ctx
, EXCP_RI
);
4226 /* Load needed operands */
4232 /* Compare two registers */
4234 gen_load_gpr(t0
, rs
);
4235 gen_load_gpr(t1
, rt
);
4238 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4252 /* Compare to zero */
4254 gen_load_gpr(t0
, rs
);
4257 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4260 #if defined(TARGET_MIPS64)
4262 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4264 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4267 btgt
= ctx
->pc
+ insn_bytes
+ offset
;
4272 /* Jump to immediate */
4273 btgt
= ((ctx
->pc
+ insn_bytes
) & (int32_t)0xF0000000) | (uint32_t)offset
;
4277 /* Jump to register */
4278 if (offset
!= 0 && offset
!= 16) {
4279 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4280 others are reserved. */
4281 MIPS_INVAL("jump hint");
4282 generate_exception_end(ctx
, EXCP_RI
);
4285 gen_load_gpr(btarget
, rs
);
4288 MIPS_INVAL("branch/jump");
4289 generate_exception_end(ctx
, EXCP_RI
);
4292 if (bcond_compute
== 0) {
4293 /* No condition to be computed */
4295 case OPC_BEQ
: /* rx == rx */
4296 case OPC_BEQL
: /* rx == rx likely */
4297 case OPC_BGEZ
: /* 0 >= 0 */
4298 case OPC_BGEZL
: /* 0 >= 0 likely */
4299 case OPC_BLEZ
: /* 0 <= 0 */
4300 case OPC_BLEZL
: /* 0 <= 0 likely */
4302 ctx
->hflags
|= MIPS_HFLAG_B
;
4304 case OPC_BGEZAL
: /* 0 >= 0 */
4305 case OPC_BGEZALL
: /* 0 >= 0 likely */
4306 /* Always take and link */
4308 ctx
->hflags
|= MIPS_HFLAG_B
;
4310 case OPC_BNE
: /* rx != rx */
4311 case OPC_BGTZ
: /* 0 > 0 */
4312 case OPC_BLTZ
: /* 0 < 0 */
4315 case OPC_BLTZAL
: /* 0 < 0 */
4316 /* Handle as an unconditional branch to get correct delay
4319 btgt
= ctx
->pc
+ insn_bytes
+ delayslot_size
;
4320 ctx
->hflags
|= MIPS_HFLAG_B
;
4322 case OPC_BLTZALL
: /* 0 < 0 likely */
4323 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 8);
4324 /* Skip the instruction in the delay slot */
4327 case OPC_BNEL
: /* rx != rx likely */
4328 case OPC_BGTZL
: /* 0 > 0 likely */
4329 case OPC_BLTZL
: /* 0 < 0 likely */
4330 /* Skip the instruction in the delay slot */
4334 ctx
->hflags
|= MIPS_HFLAG_B
;
4337 ctx
->hflags
|= MIPS_HFLAG_BX
;
4341 ctx
->hflags
|= MIPS_HFLAG_B
;
4344 ctx
->hflags
|= MIPS_HFLAG_BR
;
4348 ctx
->hflags
|= MIPS_HFLAG_BR
;
4351 MIPS_INVAL("branch/jump");
4352 generate_exception_end(ctx
, EXCP_RI
);
4358 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4361 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
4364 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4367 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
4370 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4373 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4376 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4380 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
4384 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4387 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
4390 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4393 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
4396 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4399 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4402 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
4404 #if defined(TARGET_MIPS64)
4406 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
4410 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4413 ctx
->hflags
|= MIPS_HFLAG_BC
;
4416 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
4419 ctx
->hflags
|= MIPS_HFLAG_BL
;
4422 MIPS_INVAL("conditional branch/jump");
4423 generate_exception_end(ctx
, EXCP_RI
);
4428 ctx
->btarget
= btgt
;
4430 switch (delayslot_size
) {
4432 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
4435 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
4440 int post_delay
= insn_bytes
+ delayslot_size
;
4441 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
4443 tcg_gen_movi_tl(cpu_gpr
[blink
], ctx
->pc
+ post_delay
+ lowbit
);
4447 if (insn_bytes
== 2)
4448 ctx
->hflags
|= MIPS_HFLAG_B16
;
4453 /* special3 bitfield operations */
4454 static void gen_bitops (DisasContext
*ctx
, uint32_t opc
, int rt
,
4455 int rs
, int lsb
, int msb
)
4457 TCGv t0
= tcg_temp_new();
4458 TCGv t1
= tcg_temp_new();
4460 gen_load_gpr(t1
, rs
);
4463 if (lsb
+ msb
> 31) {
4466 tcg_gen_shri_tl(t0
, t1
, lsb
);
4468 tcg_gen_andi_tl(t0
, t0
, (1U << (msb
+ 1)) - 1);
4470 tcg_gen_ext32s_tl(t0
, t0
);
4473 #if defined(TARGET_MIPS64)
4482 if (lsb
+ msb
> 63) {
4485 tcg_gen_shri_tl(t0
, t1
, lsb
);
4487 tcg_gen_andi_tl(t0
, t0
, (1ULL << (msb
+ 1)) - 1);
4495 gen_load_gpr(t0
, rt
);
4496 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4497 tcg_gen_ext32s_tl(t0
, t0
);
4499 #if defined(TARGET_MIPS64)
4510 gen_load_gpr(t0
, rt
);
4511 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
4516 MIPS_INVAL("bitops");
4517 generate_exception_end(ctx
, EXCP_RI
);
4522 gen_store_gpr(t0
, rt
);
4527 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
4532 /* If no destination, treat it as a NOP. */
4536 t0
= tcg_temp_new();
4537 gen_load_gpr(t0
, rt
);
4541 TCGv t1
= tcg_temp_new();
4543 tcg_gen_shri_tl(t1
, t0
, 8);
4544 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF);
4545 tcg_gen_shli_tl(t0
, t0
, 8);
4546 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF);
4547 tcg_gen_or_tl(t0
, t0
, t1
);
4549 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
4553 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
4556 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
4558 #if defined(TARGET_MIPS64)
4561 TCGv t1
= tcg_temp_new();
4563 tcg_gen_shri_tl(t1
, t0
, 8);
4564 tcg_gen_andi_tl(t1
, t1
, 0x00FF00FF00FF00FFULL
);
4565 tcg_gen_shli_tl(t0
, t0
, 8);
4566 tcg_gen_andi_tl(t0
, t0
, ~0x00FF00FF00FF00FFULL
);
4567 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4573 TCGv t1
= tcg_temp_new();
4575 tcg_gen_shri_tl(t1
, t0
, 16);
4576 tcg_gen_andi_tl(t1
, t1
, 0x0000FFFF0000FFFFULL
);
4577 tcg_gen_shli_tl(t0
, t0
, 16);
4578 tcg_gen_andi_tl(t0
, t0
, ~0x0000FFFF0000FFFFULL
);
4579 tcg_gen_or_tl(t0
, t0
, t1
);
4580 tcg_gen_shri_tl(t1
, t0
, 32);
4581 tcg_gen_shli_tl(t0
, t0
, 32);
4582 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
4588 MIPS_INVAL("bsfhl");
4589 generate_exception_end(ctx
, EXCP_RI
);
4596 static void gen_lsa(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4605 t0
= tcg_temp_new();
4606 t1
= tcg_temp_new();
4607 gen_load_gpr(t0
, rs
);
4608 gen_load_gpr(t1
, rt
);
4609 tcg_gen_shli_tl(t0
, t0
, imm2
+ 1);
4610 tcg_gen_add_tl(cpu_gpr
[rd
], t0
, t1
);
4611 if (opc
== OPC_LSA
) {
4612 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4621 static void gen_align(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
,
4629 t0
= tcg_temp_new();
4630 gen_load_gpr(t0
, rt
);
4632 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4634 TCGv t1
= tcg_temp_new();
4635 gen_load_gpr(t1
, rs
);
4639 TCGv_i64 t2
= tcg_temp_new_i64();
4640 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
4641 tcg_gen_shri_i64(t2
, t2
, 8 * (4 - bp
));
4642 gen_move_low32(cpu_gpr
[rd
], t2
);
4643 tcg_temp_free_i64(t2
);
4646 #if defined(TARGET_MIPS64)
4648 tcg_gen_shli_tl(t0
, t0
, 8 * bp
);
4649 tcg_gen_shri_tl(t1
, t1
, 8 * (8 - bp
));
4650 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
4660 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
4667 t0
= tcg_temp_new();
4668 gen_load_gpr(t0
, rt
);
4671 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
4673 #if defined(TARGET_MIPS64)
4675 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
4682 #ifndef CONFIG_USER_ONLY
4683 /* CP0 (MMU and control) */
4684 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
4686 TCGv_i64 t0
= tcg_temp_new_i64();
4687 TCGv_i64 t1
= tcg_temp_new_i64();
4689 tcg_gen_ext_tl_i64(t0
, arg
);
4690 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4691 #if defined(TARGET_MIPS64)
4692 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
4694 tcg_gen_concat32_i64(t1
, t1
, t0
);
4696 tcg_gen_st_i64(t1
, cpu_env
, off
);
4697 tcg_temp_free_i64(t1
);
4698 tcg_temp_free_i64(t0
);
4701 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
4703 TCGv_i64 t0
= tcg_temp_new_i64();
4704 TCGv_i64 t1
= tcg_temp_new_i64();
4706 tcg_gen_ext_tl_i64(t0
, arg
);
4707 tcg_gen_ld_i64(t1
, cpu_env
, off
);
4708 tcg_gen_concat32_i64(t1
, t1
, t0
);
4709 tcg_gen_st_i64(t1
, cpu_env
, off
);
4710 tcg_temp_free_i64(t1
);
4711 tcg_temp_free_i64(t0
);
4714 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
4716 TCGv_i64 t0
= tcg_temp_new_i64();
4718 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4719 #if defined(TARGET_MIPS64)
4720 tcg_gen_shri_i64(t0
, t0
, 30);
4722 tcg_gen_shri_i64(t0
, t0
, 32);
4724 gen_move_low32(arg
, t0
);
4725 tcg_temp_free_i64(t0
);
4728 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
4730 TCGv_i64 t0
= tcg_temp_new_i64();
4732 tcg_gen_ld_i64(t0
, cpu_env
, off
);
4733 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
4734 gen_move_low32(arg
, t0
);
4735 tcg_temp_free_i64(t0
);
4738 static inline void gen_mfc0_load32 (TCGv arg
, target_ulong off
)
4740 TCGv_i32 t0
= tcg_temp_new_i32();
4742 tcg_gen_ld_i32(t0
, cpu_env
, off
);
4743 tcg_gen_ext_i32_tl(arg
, t0
);
4744 tcg_temp_free_i32(t0
);
4747 static inline void gen_mfc0_load64 (TCGv arg
, target_ulong off
)
4749 tcg_gen_ld_tl(arg
, cpu_env
, off
);
4750 tcg_gen_ext32s_tl(arg
, arg
);
4753 static inline void gen_mtc0_store32 (TCGv arg
, target_ulong off
)
4755 TCGv_i32 t0
= tcg_temp_new_i32();
4757 tcg_gen_trunc_tl_i32(t0
, arg
);
4758 tcg_gen_st_i32(t0
, cpu_env
, off
);
4759 tcg_temp_free_i32(t0
);
4762 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4764 const char *rn
= "invalid";
4766 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4767 goto mfhc0_read_zero
;
4774 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4778 goto mfhc0_read_zero
;
4784 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4788 goto mfhc0_read_zero
;
4794 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, lladdr
),
4795 ctx
->CP0_LLAddr_shift
);
4799 goto mfhc0_read_zero
;
4808 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
4812 goto mfhc0_read_zero
;
4816 goto mfhc0_read_zero
;
4819 (void)rn
; /* avoid a compiler warning */
4820 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4824 LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4825 tcg_gen_movi_tl(arg
, 0);
4828 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4830 const char *rn
= "invalid";
4831 uint64_t mask
= ctx
->PAMask
>> 36;
4833 if (!(ctx
->hflags
& MIPS_HFLAG_ELPA
)) {
4841 tcg_gen_andi_tl(arg
, arg
, mask
);
4842 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4852 tcg_gen_andi_tl(arg
, arg
, mask
);
4853 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4863 /* LLAddr is read-only (the only exception is bit 0 if LLB is
4864 supported); the CP0_LLAddr_rw_bitmask does not seem to be
4865 relevant for modern MIPS cores supporting MTHC0, therefore
4866 treating MTHC0 to LLAddr as NOP. */
4879 tcg_gen_andi_tl(arg
, arg
, mask
);
4880 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4891 (void)rn
; /* avoid a compiler warning */
4893 LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4896 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
4898 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
4899 tcg_gen_movi_tl(arg
, 0);
4901 tcg_gen_movi_tl(arg
, ~0);
/*
 * Guard used inside the gen_{m,d}{f,t}c0 switch bodies: bail out to
 * the shared 'cp0_unimplemented' label when condition 'c' (a feature
 * or ASE availability test) does not hold.  do/while(0) keeps the
 * macro statement-safe.
 *
 * NOTE(review): the do/while scaffolding lines were dropped in
 * extraction; reconstructed — confirm against upstream.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
4912 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4914 const char *rn
= "invalid";
4917 check_insn(ctx
, ISA_MIPS32
);
4923 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4927 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4928 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
4932 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4933 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
4937 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4938 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
4942 goto cp0_unimplemented
;
4948 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
4949 gen_helper_mfc0_random(arg
, cpu_env
);
4953 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4954 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4958 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4959 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4963 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4964 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4968 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4969 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
4973 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4974 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4978 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4979 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4980 rn
= "VPEScheFBack";
4983 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
4984 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4988 goto cp0_unimplemented
;
4995 TCGv_i64 tmp
= tcg_temp_new_i64();
4996 tcg_gen_ld_i64(tmp
, cpu_env
,
4997 offsetof(CPUMIPSState
, CP0_EntryLo0
));
4998 #if defined(TARGET_MIPS64)
5000 /* Move RI/XI fields to bits 31:30 */
5001 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5002 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5005 gen_move_low32(arg
, tmp
);
5006 tcg_temp_free_i64(tmp
);
5011 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5012 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5016 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5017 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5021 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5022 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5026 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5027 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5031 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5032 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5036 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5037 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5041 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5042 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5046 goto cp0_unimplemented
;
5053 TCGv_i64 tmp
= tcg_temp_new_i64();
5054 tcg_gen_ld_i64(tmp
, cpu_env
,
5055 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5056 #if defined(TARGET_MIPS64)
5058 /* Move RI/XI fields to bits 31:30 */
5059 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5060 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5063 gen_move_low32(arg
, tmp
);
5064 tcg_temp_free_i64(tmp
);
5069 goto cp0_unimplemented
;
5075 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5076 tcg_gen_ext32s_tl(arg
, arg
);
5080 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5081 rn
= "ContextConfig";
5082 goto cp0_unimplemented
;
5085 CP0_CHECK(ctx
->ulri
);
5086 tcg_gen_ld32s_tl(arg
, cpu_env
,
5087 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5091 goto cp0_unimplemented
;
5097 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5101 check_insn(ctx
, ISA_MIPS32R2
);
5102 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5106 goto cp0_unimplemented
;
5112 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5116 check_insn(ctx
, ISA_MIPS32R2
);
5117 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5121 check_insn(ctx
, ISA_MIPS32R2
);
5122 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5126 check_insn(ctx
, ISA_MIPS32R2
);
5127 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5131 check_insn(ctx
, ISA_MIPS32R2
);
5132 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5136 check_insn(ctx
, ISA_MIPS32R2
);
5137 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5141 goto cp0_unimplemented
;
5147 check_insn(ctx
, ISA_MIPS32R2
);
5148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5152 goto cp0_unimplemented
;
5158 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5159 tcg_gen_ext32s_tl(arg
, arg
);
5164 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5169 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5173 goto cp0_unimplemented
;
5179 /* Mark as an IO operation because we read the time. */
5180 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5183 gen_helper_mfc0_count(arg
, cpu_env
);
5184 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5187 /* Break the TB to be able to take timer interrupts immediately
5188 after reading count. */
5189 ctx
->bstate
= BS_STOP
;
5192 /* 6,7 are implementation dependent */
5194 goto cp0_unimplemented
;
5200 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5201 tcg_gen_ext32s_tl(arg
, arg
);
5205 goto cp0_unimplemented
;
5211 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
5214 /* 6,7 are implementation dependent */
5216 goto cp0_unimplemented
;
5222 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
5226 check_insn(ctx
, ISA_MIPS32R2
);
5227 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
5231 check_insn(ctx
, ISA_MIPS32R2
);
5232 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
5236 check_insn(ctx
, ISA_MIPS32R2
);
5237 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5241 goto cp0_unimplemented
;
5247 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
5251 goto cp0_unimplemented
;
5257 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5258 tcg_gen_ext32s_tl(arg
, arg
);
5262 goto cp0_unimplemented
;
5268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
5272 check_insn(ctx
, ISA_MIPS32R2
);
5273 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
5277 goto cp0_unimplemented
;
5283 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
5287 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
5291 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
5295 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
5299 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
5303 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
5306 /* 6,7 are implementation dependent */
5308 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
5312 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
5316 goto cp0_unimplemented
;
5322 gen_helper_mfc0_lladdr(arg
, cpu_env
);
5326 goto cp0_unimplemented
;
5332 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
5336 goto cp0_unimplemented
;
5342 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
5346 goto cp0_unimplemented
;
5352 #if defined(TARGET_MIPS64)
5353 check_insn(ctx
, ISA_MIPS3
);
5354 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
5355 tcg_gen_ext32s_tl(arg
, arg
);
5360 goto cp0_unimplemented
;
5364 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5365 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5368 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
5372 goto cp0_unimplemented
;
5376 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5377 rn
= "'Diagnostic"; /* implementation dependent */
5382 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
5386 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5387 rn
= "TraceControl";
5390 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5391 rn
= "TraceControl2";
5394 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5395 rn
= "UserTraceData";
5398 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5402 goto cp0_unimplemented
;
5409 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5410 tcg_gen_ext32s_tl(arg
, arg
);
5414 goto cp0_unimplemented
;
5420 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
5421 rn
= "Performance0";
5424 // gen_helper_mfc0_performance1(arg);
5425 rn
= "Performance1";
5428 // gen_helper_mfc0_performance2(arg);
5429 rn
= "Performance2";
5432 // gen_helper_mfc0_performance3(arg);
5433 rn
= "Performance3";
5436 // gen_helper_mfc0_performance4(arg);
5437 rn
= "Performance4";
5440 // gen_helper_mfc0_performance5(arg);
5441 rn
= "Performance5";
5444 // gen_helper_mfc0_performance6(arg);
5445 rn
= "Performance6";
5448 // gen_helper_mfc0_performance7(arg);
5449 rn
= "Performance7";
5452 goto cp0_unimplemented
;
5456 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5462 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
5466 goto cp0_unimplemented
;
5476 TCGv_i64 tmp
= tcg_temp_new_i64();
5477 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
5478 gen_move_low32(arg
, tmp
);
5479 tcg_temp_free_i64(tmp
);
5487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
5491 goto cp0_unimplemented
;
5500 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
5507 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
5511 goto cp0_unimplemented
;
5517 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5518 tcg_gen_ext32s_tl(arg
, arg
);
5522 goto cp0_unimplemented
;
5529 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5533 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
5534 tcg_gen_ld_tl(arg
, cpu_env
,
5535 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
5536 tcg_gen_ext32s_tl(arg
, arg
);
5540 goto cp0_unimplemented
;
5544 goto cp0_unimplemented
;
5546 (void)rn
; /* avoid a compiler warning */
5547 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5551 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5552 gen_mfc0_unimplemented(ctx
, arg
);
5555 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5557 const char *rn
= "invalid";
5560 check_insn(ctx
, ISA_MIPS32
);
5562 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
5570 gen_helper_mtc0_index(cpu_env
, arg
);
5574 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5575 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
5579 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5584 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5589 goto cp0_unimplemented
;
5599 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5600 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
5604 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5605 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
5609 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5610 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
5614 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5615 gen_helper_mtc0_yqmask(cpu_env
, arg
);
5619 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5620 tcg_gen_st_tl(arg
, cpu_env
,
5621 offsetof(CPUMIPSState
, CP0_VPESchedule
));
5625 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5626 tcg_gen_st_tl(arg
, cpu_env
,
5627 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5628 rn
= "VPEScheFBack";
5631 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5632 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
5636 goto cp0_unimplemented
;
5642 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
5646 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5647 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
5651 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5652 gen_helper_mtc0_tcbind(cpu_env
, arg
);
5656 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5657 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
5661 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5662 gen_helper_mtc0_tchalt(cpu_env
, arg
);
5666 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5667 gen_helper_mtc0_tccontext(cpu_env
, arg
);
5671 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5672 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
5676 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5677 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
5681 goto cp0_unimplemented
;
5687 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
5691 goto cp0_unimplemented
;
5697 gen_helper_mtc0_context(cpu_env
, arg
);
5701 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
5702 rn
= "ContextConfig";
5703 goto cp0_unimplemented
;
5706 CP0_CHECK(ctx
->ulri
);
5707 tcg_gen_st_tl(arg
, cpu_env
,
5708 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5712 goto cp0_unimplemented
;
5718 gen_helper_mtc0_pagemask(cpu_env
, arg
);
5722 check_insn(ctx
, ISA_MIPS32R2
);
5723 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
5725 ctx
->bstate
= BS_STOP
;
5728 goto cp0_unimplemented
;
5734 gen_helper_mtc0_wired(cpu_env
, arg
);
5738 check_insn(ctx
, ISA_MIPS32R2
);
5739 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
5743 check_insn(ctx
, ISA_MIPS32R2
);
5744 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
5748 check_insn(ctx
, ISA_MIPS32R2
);
5749 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
5753 check_insn(ctx
, ISA_MIPS32R2
);
5754 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
5758 check_insn(ctx
, ISA_MIPS32R2
);
5759 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
5763 goto cp0_unimplemented
;
5769 check_insn(ctx
, ISA_MIPS32R2
);
5770 gen_helper_mtc0_hwrena(cpu_env
, arg
);
5771 ctx
->bstate
= BS_STOP
;
5775 goto cp0_unimplemented
;
5793 goto cp0_unimplemented
;
5799 gen_helper_mtc0_count(cpu_env
, arg
);
5802 /* 6,7 are implementation dependent */
5804 goto cp0_unimplemented
;
5810 gen_helper_mtc0_entryhi(cpu_env
, arg
);
5814 goto cp0_unimplemented
;
5820 gen_helper_mtc0_compare(cpu_env
, arg
);
5823 /* 6,7 are implementation dependent */
5825 goto cp0_unimplemented
;
5831 save_cpu_state(ctx
, 1);
5832 gen_helper_mtc0_status(cpu_env
, arg
);
5833 /* BS_STOP isn't good enough here, hflags may have changed. */
5834 gen_save_pc(ctx
->pc
+ 4);
5835 ctx
->bstate
= BS_EXCP
;
5839 check_insn(ctx
, ISA_MIPS32R2
);
5840 gen_helper_mtc0_intctl(cpu_env
, arg
);
5841 /* Stop translation as we may have switched the execution mode */
5842 ctx
->bstate
= BS_STOP
;
5846 check_insn(ctx
, ISA_MIPS32R2
);
5847 gen_helper_mtc0_srsctl(cpu_env
, arg
);
5848 /* Stop translation as we may have switched the execution mode */
5849 ctx
->bstate
= BS_STOP
;
5853 check_insn(ctx
, ISA_MIPS32R2
);
5854 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5855 /* Stop translation as we may have switched the execution mode */
5856 ctx
->bstate
= BS_STOP
;
5860 goto cp0_unimplemented
;
5866 save_cpu_state(ctx
, 1);
5867 gen_helper_mtc0_cause(cpu_env
, arg
);
5871 goto cp0_unimplemented
;
5877 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5881 goto cp0_unimplemented
;
5891 check_insn(ctx
, ISA_MIPS32R2
);
5892 gen_helper_mtc0_ebase(cpu_env
, arg
);
5896 goto cp0_unimplemented
;
5902 gen_helper_mtc0_config0(cpu_env
, arg
);
5904 /* Stop translation as we may have switched the execution mode */
5905 ctx
->bstate
= BS_STOP
;
5908 /* ignored, read only */
5912 gen_helper_mtc0_config2(cpu_env
, arg
);
5914 /* Stop translation as we may have switched the execution mode */
5915 ctx
->bstate
= BS_STOP
;
5918 gen_helper_mtc0_config3(cpu_env
, arg
);
5920 /* Stop translation as we may have switched the execution mode */
5921 ctx
->bstate
= BS_STOP
;
5924 gen_helper_mtc0_config4(cpu_env
, arg
);
5926 ctx
->bstate
= BS_STOP
;
5929 gen_helper_mtc0_config5(cpu_env
, arg
);
5931 /* Stop translation as we may have switched the execution mode */
5932 ctx
->bstate
= BS_STOP
;
5934 /* 6,7 are implementation dependent */
5944 rn
= "Invalid config selector";
5945 goto cp0_unimplemented
;
5951 gen_helper_mtc0_lladdr(cpu_env
, arg
);
5955 goto cp0_unimplemented
;
5961 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
5965 goto cp0_unimplemented
;
5971 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
5975 goto cp0_unimplemented
;
5981 #if defined(TARGET_MIPS64)
5982 check_insn(ctx
, ISA_MIPS3
);
5983 gen_helper_mtc0_xcontext(cpu_env
, arg
);
5988 goto cp0_unimplemented
;
5992 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5993 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
5996 gen_helper_mtc0_framemask(cpu_env
, arg
);
6000 goto cp0_unimplemented
;
6005 rn
= "Diagnostic"; /* implementation dependent */
6010 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6011 /* BS_STOP isn't good enough here, hflags may have changed. */
6012 gen_save_pc(ctx
->pc
+ 4);
6013 ctx
->bstate
= BS_EXCP
;
6017 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6018 rn
= "TraceControl";
6019 /* Stop translation as we may have switched the execution mode */
6020 ctx
->bstate
= BS_STOP
;
6023 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6024 rn
= "TraceControl2";
6025 /* Stop translation as we may have switched the execution mode */
6026 ctx
->bstate
= BS_STOP
;
6029 /* Stop translation as we may have switched the execution mode */
6030 ctx
->bstate
= BS_STOP
;
6031 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6032 rn
= "UserTraceData";
6033 /* Stop translation as we may have switched the execution mode */
6034 ctx
->bstate
= BS_STOP
;
6037 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6038 /* Stop translation as we may have switched the execution mode */
6039 ctx
->bstate
= BS_STOP
;
6043 goto cp0_unimplemented
;
6050 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6054 goto cp0_unimplemented
;
6060 gen_helper_mtc0_performance0(cpu_env
, arg
);
6061 rn
= "Performance0";
6064 // gen_helper_mtc0_performance1(arg);
6065 rn
= "Performance1";
6068 // gen_helper_mtc0_performance2(arg);
6069 rn
= "Performance2";
6072 // gen_helper_mtc0_performance3(arg);
6073 rn
= "Performance3";
6076 // gen_helper_mtc0_performance4(arg);
6077 rn
= "Performance4";
6080 // gen_helper_mtc0_performance5(arg);
6081 rn
= "Performance5";
6084 // gen_helper_mtc0_performance6(arg);
6085 rn
= "Performance6";
6088 // gen_helper_mtc0_performance7(arg);
6089 rn
= "Performance7";
6092 goto cp0_unimplemented
;
6106 goto cp0_unimplemented
;
6115 gen_helper_mtc0_taglo(cpu_env
, arg
);
6122 gen_helper_mtc0_datalo(cpu_env
, arg
);
6126 goto cp0_unimplemented
;
6135 gen_helper_mtc0_taghi(cpu_env
, arg
);
6142 gen_helper_mtc0_datahi(cpu_env
, arg
);
6147 goto cp0_unimplemented
;
6153 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6157 goto cp0_unimplemented
;
6164 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6168 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6169 tcg_gen_st_tl(arg
, cpu_env
,
6170 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6174 goto cp0_unimplemented
;
6176 /* Stop translation as we may have switched the execution mode */
6177 ctx
->bstate
= BS_STOP
;
6180 goto cp0_unimplemented
;
6182 (void)rn
; /* avoid a compiler warning */
6183 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6184 /* For simplicity assume that all writes can cause interrupts. */
6185 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6187 ctx
->bstate
= BS_STOP
;
6192 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6195 #if defined(TARGET_MIPS64)
6196 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6198 const char *rn
= "invalid";
6201 check_insn(ctx
, ISA_MIPS64
);
6207 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6211 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6212 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6216 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6217 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6221 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6222 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6226 goto cp0_unimplemented
;
6232 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6233 gen_helper_mfc0_random(arg
, cpu_env
);
6237 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6238 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6242 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6243 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6247 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6252 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6253 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
6257 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6258 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6262 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6263 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6264 rn
= "VPEScheFBack";
6267 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6268 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6272 goto cp0_unimplemented
;
6278 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6282 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6283 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6287 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6288 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6292 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6293 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
6297 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6298 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
6302 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6303 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
6307 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6308 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
6312 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6313 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
6317 goto cp0_unimplemented
;
6323 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6327 goto cp0_unimplemented
;
6333 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6337 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6338 rn
= "ContextConfig";
6339 goto cp0_unimplemented
;
6342 CP0_CHECK(ctx
->ulri
);
6343 tcg_gen_ld_tl(arg
, cpu_env
,
6344 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6348 goto cp0_unimplemented
;
6354 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6358 check_insn(ctx
, ISA_MIPS32R2
);
6359 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6363 goto cp0_unimplemented
;
6369 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
6373 check_insn(ctx
, ISA_MIPS32R2
);
6374 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
6378 check_insn(ctx
, ISA_MIPS32R2
);
6379 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
6383 check_insn(ctx
, ISA_MIPS32R2
);
6384 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
6388 check_insn(ctx
, ISA_MIPS32R2
);
6389 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
6393 check_insn(ctx
, ISA_MIPS32R2
);
6394 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
6398 goto cp0_unimplemented
;
6404 check_insn(ctx
, ISA_MIPS32R2
);
6405 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
6409 goto cp0_unimplemented
;
6415 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
6420 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
6425 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
6429 goto cp0_unimplemented
;
6435 /* Mark as an IO operation because we read the time. */
6436 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6439 gen_helper_mfc0_count(arg
, cpu_env
);
6440 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6443 /* Break the TB to be able to take timer interrupts immediately
6444 after reading count. */
6445 ctx
->bstate
= BS_STOP
;
6448 /* 6,7 are implementation dependent */
6450 goto cp0_unimplemented
;
6456 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6460 goto cp0_unimplemented
;
6466 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6469 /* 6,7 are implementation dependent */
6471 goto cp0_unimplemented
;
6477 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6481 check_insn(ctx
, ISA_MIPS32R2
);
6482 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6486 check_insn(ctx
, ISA_MIPS32R2
);
6487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6491 check_insn(ctx
, ISA_MIPS32R2
);
6492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6496 goto cp0_unimplemented
;
6502 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6506 goto cp0_unimplemented
;
6512 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6516 goto cp0_unimplemented
;
6522 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6526 check_insn(ctx
, ISA_MIPS32R2
);
6527 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
6531 goto cp0_unimplemented
;
6537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6541 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6545 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6549 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6553 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6557 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6560 /* 6,7 are implementation dependent */
6562 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6566 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6570 goto cp0_unimplemented
;
6576 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
6580 goto cp0_unimplemented
;
6586 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
6590 goto cp0_unimplemented
;
6596 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6600 goto cp0_unimplemented
;
6606 check_insn(ctx
, ISA_MIPS3
);
6607 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6611 goto cp0_unimplemented
;
6615 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6616 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
6619 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6623 goto cp0_unimplemented
;
6627 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6628 rn
= "'Diagnostic"; /* implementation dependent */
6633 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6637 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
6638 rn
= "TraceControl";
6641 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
6642 rn
= "TraceControl2";
6645 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
6646 rn
= "UserTraceData";
6649 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
6653 goto cp0_unimplemented
;
6660 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6664 goto cp0_unimplemented
;
6670 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6671 rn
= "Performance0";
6674 // gen_helper_dmfc0_performance1(arg);
6675 rn
= "Performance1";
6678 // gen_helper_dmfc0_performance2(arg);
6679 rn
= "Performance2";
6682 // gen_helper_dmfc0_performance3(arg);
6683 rn
= "Performance3";
6686 // gen_helper_dmfc0_performance4(arg);
6687 rn
= "Performance4";
6690 // gen_helper_dmfc0_performance5(arg);
6691 rn
= "Performance5";
6694 // gen_helper_dmfc0_performance6(arg);
6695 rn
= "Performance6";
6698 // gen_helper_dmfc0_performance7(arg);
6699 rn
= "Performance7";
6702 goto cp0_unimplemented
;
6706 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6713 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6717 goto cp0_unimplemented
;
6726 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6733 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6737 goto cp0_unimplemented
;
6746 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6753 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6757 goto cp0_unimplemented
;
6763 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6767 goto cp0_unimplemented
;
6774 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6778 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6779 tcg_gen_ld_tl(arg
, cpu_env
,
6780 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
6784 goto cp0_unimplemented
;
6788 goto cp0_unimplemented
;
6790 (void)rn
; /* avoid a compiler warning */
6791 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6795 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
6796 gen_mfc0_unimplemented(ctx
, arg
);
6799 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6801 const char *rn
= "invalid";
6804 check_insn(ctx
, ISA_MIPS64
);
6806 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
6814 gen_helper_mtc0_index(cpu_env
, arg
);
6818 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6819 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6823 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6828 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6833 goto cp0_unimplemented
;
6843 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6844 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6848 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6849 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6853 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6854 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6858 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6859 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6863 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6864 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6868 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6869 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6870 rn
= "VPEScheFBack";
6873 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6874 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6878 goto cp0_unimplemented
;
6884 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
6888 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6889 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6893 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6894 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6898 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6899 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6903 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6904 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6908 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6909 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6913 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6914 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6918 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6919 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6923 goto cp0_unimplemented
;
6929 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
6933 goto cp0_unimplemented
;
6939 gen_helper_mtc0_context(cpu_env
, arg
);
6943 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6944 rn
= "ContextConfig";
6945 goto cp0_unimplemented
;
6948 CP0_CHECK(ctx
->ulri
);
6949 tcg_gen_st_tl(arg
, cpu_env
,
6950 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6954 goto cp0_unimplemented
;
6960 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6964 check_insn(ctx
, ISA_MIPS32R2
);
6965 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6969 goto cp0_unimplemented
;
6975 gen_helper_mtc0_wired(cpu_env
, arg
);
6979 check_insn(ctx
, ISA_MIPS32R2
);
6980 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6984 check_insn(ctx
, ISA_MIPS32R2
);
6985 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6989 check_insn(ctx
, ISA_MIPS32R2
);
6990 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
6994 check_insn(ctx
, ISA_MIPS32R2
);
6995 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
6999 check_insn(ctx
, ISA_MIPS32R2
);
7000 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7004 goto cp0_unimplemented
;
7010 check_insn(ctx
, ISA_MIPS32R2
);
7011 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7012 ctx
->bstate
= BS_STOP
;
7016 goto cp0_unimplemented
;
7034 goto cp0_unimplemented
;
7040 gen_helper_mtc0_count(cpu_env
, arg
);
7043 /* 6,7 are implementation dependent */
7045 goto cp0_unimplemented
;
7047 /* Stop translation as we may have switched the execution mode */
7048 ctx
->bstate
= BS_STOP
;
7053 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7057 goto cp0_unimplemented
;
7063 gen_helper_mtc0_compare(cpu_env
, arg
);
7066 /* 6,7 are implementation dependent */
7068 goto cp0_unimplemented
;
7070 /* Stop translation as we may have switched the execution mode */
7071 ctx
->bstate
= BS_STOP
;
7076 save_cpu_state(ctx
, 1);
7077 gen_helper_mtc0_status(cpu_env
, arg
);
7078 /* BS_STOP isn't good enough here, hflags may have changed. */
7079 gen_save_pc(ctx
->pc
+ 4);
7080 ctx
->bstate
= BS_EXCP
;
7084 check_insn(ctx
, ISA_MIPS32R2
);
7085 gen_helper_mtc0_intctl(cpu_env
, arg
);
7086 /* Stop translation as we may have switched the execution mode */
7087 ctx
->bstate
= BS_STOP
;
7091 check_insn(ctx
, ISA_MIPS32R2
);
7092 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7093 /* Stop translation as we may have switched the execution mode */
7094 ctx
->bstate
= BS_STOP
;
7098 check_insn(ctx
, ISA_MIPS32R2
);
7099 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7100 /* Stop translation as we may have switched the execution mode */
7101 ctx
->bstate
= BS_STOP
;
7105 goto cp0_unimplemented
;
7111 save_cpu_state(ctx
, 1);
7112 /* Mark as an IO operation because we may trigger a software
7114 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7117 gen_helper_mtc0_cause(cpu_env
, arg
);
7118 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7121 /* Stop translation as we may have triggered an intetrupt */
7122 ctx
->bstate
= BS_STOP
;
7126 goto cp0_unimplemented
;
7132 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7136 goto cp0_unimplemented
;
7146 check_insn(ctx
, ISA_MIPS32R2
);
7147 gen_helper_mtc0_ebase(cpu_env
, arg
);
7151 goto cp0_unimplemented
;
7157 gen_helper_mtc0_config0(cpu_env
, arg
);
7159 /* Stop translation as we may have switched the execution mode */
7160 ctx
->bstate
= BS_STOP
;
7163 /* ignored, read only */
7167 gen_helper_mtc0_config2(cpu_env
, arg
);
7169 /* Stop translation as we may have switched the execution mode */
7170 ctx
->bstate
= BS_STOP
;
7173 gen_helper_mtc0_config3(cpu_env
, arg
);
7175 /* Stop translation as we may have switched the execution mode */
7176 ctx
->bstate
= BS_STOP
;
7179 /* currently ignored */
7183 gen_helper_mtc0_config5(cpu_env
, arg
);
7185 /* Stop translation as we may have switched the execution mode */
7186 ctx
->bstate
= BS_STOP
;
7188 /* 6,7 are implementation dependent */
7190 rn
= "Invalid config selector";
7191 goto cp0_unimplemented
;
7197 gen_helper_mtc0_lladdr(cpu_env
, arg
);
7201 goto cp0_unimplemented
;
7207 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
7211 goto cp0_unimplemented
;
7217 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
7221 goto cp0_unimplemented
;
7227 check_insn(ctx
, ISA_MIPS3
);
7228 gen_helper_mtc0_xcontext(cpu_env
, arg
);
7232 goto cp0_unimplemented
;
7236 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7237 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS32R6
));
7240 gen_helper_mtc0_framemask(cpu_env
, arg
);
7244 goto cp0_unimplemented
;
7249 rn
= "Diagnostic"; /* implementation dependent */
7254 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
7255 /* BS_STOP isn't good enough here, hflags may have changed. */
7256 gen_save_pc(ctx
->pc
+ 4);
7257 ctx
->bstate
= BS_EXCP
;
7261 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7262 /* Stop translation as we may have switched the execution mode */
7263 ctx
->bstate
= BS_STOP
;
7264 rn
= "TraceControl";
7267 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7268 /* Stop translation as we may have switched the execution mode */
7269 ctx
->bstate
= BS_STOP
;
7270 rn
= "TraceControl2";
7273 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7274 /* Stop translation as we may have switched the execution mode */
7275 ctx
->bstate
= BS_STOP
;
7276 rn
= "UserTraceData";
7279 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7280 /* Stop translation as we may have switched the execution mode */
7281 ctx
->bstate
= BS_STOP
;
7285 goto cp0_unimplemented
;
7292 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7296 goto cp0_unimplemented
;
7302 gen_helper_mtc0_performance0(cpu_env
, arg
);
7303 rn
= "Performance0";
7306 // gen_helper_mtc0_performance1(cpu_env, arg);
7307 rn
= "Performance1";
7310 // gen_helper_mtc0_performance2(cpu_env, arg);
7311 rn
= "Performance2";
7314 // gen_helper_mtc0_performance3(cpu_env, arg);
7315 rn
= "Performance3";
7318 // gen_helper_mtc0_performance4(cpu_env, arg);
7319 rn
= "Performance4";
7322 // gen_helper_mtc0_performance5(cpu_env, arg);
7323 rn
= "Performance5";
7326 // gen_helper_mtc0_performance6(cpu_env, arg);
7327 rn
= "Performance6";
7330 // gen_helper_mtc0_performance7(cpu_env, arg);
7331 rn
= "Performance7";
7334 goto cp0_unimplemented
;
7348 goto cp0_unimplemented
;
7357 gen_helper_mtc0_taglo(cpu_env
, arg
);
7364 gen_helper_mtc0_datalo(cpu_env
, arg
);
7368 goto cp0_unimplemented
;
7377 gen_helper_mtc0_taghi(cpu_env
, arg
);
7384 gen_helper_mtc0_datahi(cpu_env
, arg
);
7389 goto cp0_unimplemented
;
7395 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7399 goto cp0_unimplemented
;
7406 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7410 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7411 tcg_gen_st_tl(arg
, cpu_env
,
7412 offsetof(CPUMIPSState
, CP0_KScratch
[sel
-2]));
7416 goto cp0_unimplemented
;
7418 /* Stop translation as we may have switched the execution mode */
7419 ctx
->bstate
= BS_STOP
;
7422 goto cp0_unimplemented
;
7424 (void)rn
; /* avoid a compiler warning */
7425 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7426 /* For simplicity assume that all writes can cause interrupts. */
7427 if (ctx
->tb
->cflags
& CF_USE_ICOUNT
) {
7429 ctx
->bstate
= BS_STOP
;
7434 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
7436 #endif /* TARGET_MIPS64 */
7438 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7439 int u
, int sel
, int h
)
7441 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7442 TCGv t0
= tcg_temp_local_new();
7444 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7445 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7446 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7447 tcg_gen_movi_tl(t0
, -1);
7448 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7449 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7450 tcg_gen_movi_tl(t0
, -1);
7456 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7459 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7469 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7472 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7475 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7478 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7481 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7484 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7487 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7490 gen_mfc0(ctx
, t0
, rt
, sel
);
7497 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7500 gen_mfc0(ctx
, t0
, rt
, sel
);
7506 gen_helper_mftc0_status(t0
, cpu_env
);
7509 gen_mfc0(ctx
, t0
, rt
, sel
);
7515 gen_helper_mftc0_cause(t0
, cpu_env
);
7525 gen_helper_mftc0_epc(t0
, cpu_env
);
7535 gen_helper_mftc0_ebase(t0
, cpu_env
);
7545 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7555 gen_helper_mftc0_debug(t0
, cpu_env
);
7558 gen_mfc0(ctx
, t0
, rt
, sel
);
7563 gen_mfc0(ctx
, t0
, rt
, sel
);
7565 } else switch (sel
) {
7566 /* GPR registers. */
7568 gen_helper_1e0i(mftgpr
, t0
, rt
);
7570 /* Auxiliary CPU registers */
7574 gen_helper_1e0i(mftlo
, t0
, 0);
7577 gen_helper_1e0i(mfthi
, t0
, 0);
7580 gen_helper_1e0i(mftacx
, t0
, 0);
7583 gen_helper_1e0i(mftlo
, t0
, 1);
7586 gen_helper_1e0i(mfthi
, t0
, 1);
7589 gen_helper_1e0i(mftacx
, t0
, 1);
7592 gen_helper_1e0i(mftlo
, t0
, 2);
7595 gen_helper_1e0i(mfthi
, t0
, 2);
7598 gen_helper_1e0i(mftacx
, t0
, 2);
7601 gen_helper_1e0i(mftlo
, t0
, 3);
7604 gen_helper_1e0i(mfthi
, t0
, 3);
7607 gen_helper_1e0i(mftacx
, t0
, 3);
7610 gen_helper_mftdsp(t0
, cpu_env
);
7616 /* Floating point (COP1). */
7618 /* XXX: For now we support only a single FPU context. */
7620 TCGv_i32 fp0
= tcg_temp_new_i32();
7622 gen_load_fpr32(ctx
, fp0
, rt
);
7623 tcg_gen_ext_i32_tl(t0
, fp0
);
7624 tcg_temp_free_i32(fp0
);
7626 TCGv_i32 fp0
= tcg_temp_new_i32();
7628 gen_load_fpr32h(ctx
, fp0
, rt
);
7629 tcg_gen_ext_i32_tl(t0
, fp0
);
7630 tcg_temp_free_i32(fp0
);
7634 /* XXX: For now we support only a single FPU context. */
7635 gen_helper_1e0i(cfc1
, t0
, rt
);
7637 /* COP2: Not implemented. */
7644 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7645 gen_store_gpr(t0
, rd
);
7651 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7652 generate_exception_end(ctx
, EXCP_RI
);
7655 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7656 int u
, int sel
, int h
)
7658 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7659 TCGv t0
= tcg_temp_local_new();
7661 gen_load_gpr(t0
, rt
);
7662 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7663 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7664 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7666 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7667 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7674 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7677 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7687 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7690 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7693 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7696 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7699 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7702 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7705 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7708 gen_mtc0(ctx
, t0
, rd
, sel
);
7715 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7718 gen_mtc0(ctx
, t0
, rd
, sel
);
7724 gen_helper_mttc0_status(cpu_env
, t0
);
7727 gen_mtc0(ctx
, t0
, rd
, sel
);
7733 gen_helper_mttc0_cause(cpu_env
, t0
);
7743 gen_helper_mttc0_ebase(cpu_env
, t0
);
7753 gen_helper_mttc0_debug(cpu_env
, t0
);
7756 gen_mtc0(ctx
, t0
, rd
, sel
);
7761 gen_mtc0(ctx
, t0
, rd
, sel
);
7763 } else switch (sel
) {
7764 /* GPR registers. */
7766 gen_helper_0e1i(mttgpr
, t0
, rd
);
7768 /* Auxiliary CPU registers */
7772 gen_helper_0e1i(mttlo
, t0
, 0);
7775 gen_helper_0e1i(mtthi
, t0
, 0);
7778 gen_helper_0e1i(mttacx
, t0
, 0);
7781 gen_helper_0e1i(mttlo
, t0
, 1);
7784 gen_helper_0e1i(mtthi
, t0
, 1);
7787 gen_helper_0e1i(mttacx
, t0
, 1);
7790 gen_helper_0e1i(mttlo
, t0
, 2);
7793 gen_helper_0e1i(mtthi
, t0
, 2);
7796 gen_helper_0e1i(mttacx
, t0
, 2);
7799 gen_helper_0e1i(mttlo
, t0
, 3);
7802 gen_helper_0e1i(mtthi
, t0
, 3);
7805 gen_helper_0e1i(mttacx
, t0
, 3);
7808 gen_helper_mttdsp(cpu_env
, t0
);
7814 /* Floating point (COP1). */
7816 /* XXX: For now we support only a single FPU context. */
7818 TCGv_i32 fp0
= tcg_temp_new_i32();
7820 tcg_gen_trunc_tl_i32(fp0
, t0
);
7821 gen_store_fpr32(ctx
, fp0
, rd
);
7822 tcg_temp_free_i32(fp0
);
7824 TCGv_i32 fp0
= tcg_temp_new_i32();
7826 tcg_gen_trunc_tl_i32(fp0
, t0
);
7827 gen_store_fpr32h(ctx
, fp0
, rd
);
7828 tcg_temp_free_i32(fp0
);
7832 /* XXX: For now we support only a single FPU context. */
7834 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
7836 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
7837 tcg_temp_free_i32(fs_tmp
);
7839 /* Stop translation as we may have changed hflags */
7840 ctx
->bstate
= BS_STOP
;
7842 /* COP2: Not implemented. */
7849 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7855 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
7856 generate_exception_end(ctx
, EXCP_RI
);
7859 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
7861 const char *opn
= "ldst";
7863 check_cp0_enabled(ctx
);
7870 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7875 TCGv t0
= tcg_temp_new();
7877 gen_load_gpr(t0
, rt
);
7878 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7883 #if defined(TARGET_MIPS64)
7885 check_insn(ctx
, ISA_MIPS3
);
7890 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7894 check_insn(ctx
, ISA_MIPS3
);
7896 TCGv t0
= tcg_temp_new();
7898 gen_load_gpr(t0
, rt
);
7899 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7911 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
7917 TCGv t0
= tcg_temp_new();
7918 gen_load_gpr(t0
, rt
);
7919 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
7925 check_insn(ctx
, ASE_MT
);
7930 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
7931 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
7935 check_insn(ctx
, ASE_MT
);
7936 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
7937 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
7942 if (!env
->tlb
->helper_tlbwi
)
7944 gen_helper_tlbwi(cpu_env
);
7949 if (!env
->tlb
->helper_tlbinv
) {
7952 gen_helper_tlbinv(cpu_env
);
7953 } /* treat as nop if TLBINV not supported */
7958 if (!env
->tlb
->helper_tlbinvf
) {
7961 gen_helper_tlbinvf(cpu_env
);
7962 } /* treat as nop if TLBINV not supported */
7966 if (!env
->tlb
->helper_tlbwr
)
7968 gen_helper_tlbwr(cpu_env
);
7972 if (!env
->tlb
->helper_tlbp
)
7974 gen_helper_tlbp(cpu_env
);
7978 if (!env
->tlb
->helper_tlbr
)
7980 gen_helper_tlbr(cpu_env
);
7982 case OPC_ERET
: /* OPC_ERETNC */
7983 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
7984 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
7987 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
7988 if (ctx
->opcode
& (1 << bit_shift
)) {
7991 check_insn(ctx
, ISA_MIPS32R5
);
7992 gen_helper_eretnc(cpu_env
);
7996 check_insn(ctx
, ISA_MIPS2
);
7997 gen_helper_eret(cpu_env
);
7999 ctx
->bstate
= BS_EXCP
;
8004 check_insn(ctx
, ISA_MIPS32
);
8005 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8006 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8009 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8011 generate_exception_end(ctx
, EXCP_RI
);
8013 gen_helper_deret(cpu_env
);
8014 ctx
->bstate
= BS_EXCP
;
8019 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8020 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8021 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8024 /* If we get an exception, we want to restart at next instruction */
8026 save_cpu_state(ctx
, 1);
8028 gen_helper_wait(cpu_env
);
8029 ctx
->bstate
= BS_EXCP
;
8034 generate_exception_end(ctx
, EXCP_RI
);
8037 (void)opn
; /* avoid a compiler warning */
8039 #endif /* !CONFIG_USER_ONLY */
8041 /* CP1 Branches (before delay slot) */
8042 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8043 int32_t cc
, int32_t offset
)
8045 target_ulong btarget
;
8046 TCGv_i32 t0
= tcg_temp_new_i32();
8048 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8049 generate_exception_end(ctx
, EXCP_RI
);
8054 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8056 btarget
= ctx
->pc
+ 4 + offset
;
8060 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8061 tcg_gen_not_i32(t0
, t0
);
8062 tcg_gen_andi_i32(t0
, t0
, 1);
8063 tcg_gen_extu_i32_tl(bcond
, t0
);
8066 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8067 tcg_gen_not_i32(t0
, t0
);
8068 tcg_gen_andi_i32(t0
, t0
, 1);
8069 tcg_gen_extu_i32_tl(bcond
, t0
);
8072 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8073 tcg_gen_andi_i32(t0
, t0
, 1);
8074 tcg_gen_extu_i32_tl(bcond
, t0
);
8077 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8078 tcg_gen_andi_i32(t0
, t0
, 1);
8079 tcg_gen_extu_i32_tl(bcond
, t0
);
8081 ctx
->hflags
|= MIPS_HFLAG_BL
;
8085 TCGv_i32 t1
= tcg_temp_new_i32();
8086 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8087 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8088 tcg_gen_nand_i32(t0
, t0
, t1
);
8089 tcg_temp_free_i32(t1
);
8090 tcg_gen_andi_i32(t0
, t0
, 1);
8091 tcg_gen_extu_i32_tl(bcond
, t0
);
8096 TCGv_i32 t1
= tcg_temp_new_i32();
8097 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8098 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8099 tcg_gen_or_i32(t0
, t0
, t1
);
8100 tcg_temp_free_i32(t1
);
8101 tcg_gen_andi_i32(t0
, t0
, 1);
8102 tcg_gen_extu_i32_tl(bcond
, t0
);
8107 TCGv_i32 t1
= tcg_temp_new_i32();
8108 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8109 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8110 tcg_gen_and_i32(t0
, t0
, t1
);
8111 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8112 tcg_gen_and_i32(t0
, t0
, t1
);
8113 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8114 tcg_gen_nand_i32(t0
, t0
, t1
);
8115 tcg_temp_free_i32(t1
);
8116 tcg_gen_andi_i32(t0
, t0
, 1);
8117 tcg_gen_extu_i32_tl(bcond
, t0
);
8122 TCGv_i32 t1
= tcg_temp_new_i32();
8123 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8124 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8125 tcg_gen_or_i32(t0
, t0
, t1
);
8126 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8127 tcg_gen_or_i32(t0
, t0
, t1
);
8128 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8129 tcg_gen_or_i32(t0
, t0
, t1
);
8130 tcg_temp_free_i32(t1
);
8131 tcg_gen_andi_i32(t0
, t0
, 1);
8132 tcg_gen_extu_i32_tl(bcond
, t0
);
8135 ctx
->hflags
|= MIPS_HFLAG_BC
;
8138 MIPS_INVAL("cp1 cond branch");
8139 generate_exception_end(ctx
, EXCP_RI
);
8142 ctx
->btarget
= btarget
;
8143 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8145 tcg_temp_free_i32(t0
);
8148 /* R6 CP1 Branches */
8149 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
8150 int32_t ft
, int32_t offset
,
8153 target_ulong btarget
;
8154 TCGv_i64 t0
= tcg_temp_new_i64();
8156 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
8157 #ifdef MIPS_DEBUG_DISAS
8158 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8161 generate_exception_end(ctx
, EXCP_RI
);
8165 gen_load_fpr64(ctx
, t0
, ft
);
8166 tcg_gen_andi_i64(t0
, t0
, 1);
8168 btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
8172 tcg_gen_xori_i64(t0
, t0
, 1);
8173 ctx
->hflags
|= MIPS_HFLAG_BC
;
8176 /* t0 already set */
8177 ctx
->hflags
|= MIPS_HFLAG_BC
;
8180 MIPS_INVAL("cp1 cond branch");
8181 generate_exception_end(ctx
, EXCP_RI
);
8185 tcg_gen_trunc_i64_tl(bcond
, t0
);
8187 ctx
->btarget
= btarget
;
8189 switch (delayslot_size
) {
8191 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
8194 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8199 tcg_temp_free_i64(t0
);
8202 /* Coprocessor 1 (FPU) */
8204 #define FOP(func, fmt) (((fmt) << 21) | (func))
8207 OPC_ADD_S
= FOP(0, FMT_S
),
8208 OPC_SUB_S
= FOP(1, FMT_S
),
8209 OPC_MUL_S
= FOP(2, FMT_S
),
8210 OPC_DIV_S
= FOP(3, FMT_S
),
8211 OPC_SQRT_S
= FOP(4, FMT_S
),
8212 OPC_ABS_S
= FOP(5, FMT_S
),
8213 OPC_MOV_S
= FOP(6, FMT_S
),
8214 OPC_NEG_S
= FOP(7, FMT_S
),
8215 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8216 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8217 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8218 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8219 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8220 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8221 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8222 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8223 OPC_SEL_S
= FOP(16, FMT_S
),
8224 OPC_MOVCF_S
= FOP(17, FMT_S
),
8225 OPC_MOVZ_S
= FOP(18, FMT_S
),
8226 OPC_MOVN_S
= FOP(19, FMT_S
),
8227 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8228 OPC_RECIP_S
= FOP(21, FMT_S
),
8229 OPC_RSQRT_S
= FOP(22, FMT_S
),
8230 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8231 OPC_MADDF_S
= FOP(24, FMT_S
),
8232 OPC_MSUBF_S
= FOP(25, FMT_S
),
8233 OPC_RINT_S
= FOP(26, FMT_S
),
8234 OPC_CLASS_S
= FOP(27, FMT_S
),
8235 OPC_MIN_S
= FOP(28, FMT_S
),
8236 OPC_RECIP2_S
= FOP(28, FMT_S
),
8237 OPC_MINA_S
= FOP(29, FMT_S
),
8238 OPC_RECIP1_S
= FOP(29, FMT_S
),
8239 OPC_MAX_S
= FOP(30, FMT_S
),
8240 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8241 OPC_MAXA_S
= FOP(31, FMT_S
),
8242 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8243 OPC_CVT_D_S
= FOP(33, FMT_S
),
8244 OPC_CVT_W_S
= FOP(36, FMT_S
),
8245 OPC_CVT_L_S
= FOP(37, FMT_S
),
8246 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8247 OPC_CMP_F_S
= FOP (48, FMT_S
),
8248 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8249 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8250 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8251 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8252 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8253 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8254 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8255 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8256 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8257 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8258 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8259 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8260 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8261 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8262 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8264 OPC_ADD_D
= FOP(0, FMT_D
),
8265 OPC_SUB_D
= FOP(1, FMT_D
),
8266 OPC_MUL_D
= FOP(2, FMT_D
),
8267 OPC_DIV_D
= FOP(3, FMT_D
),
8268 OPC_SQRT_D
= FOP(4, FMT_D
),
8269 OPC_ABS_D
= FOP(5, FMT_D
),
8270 OPC_MOV_D
= FOP(6, FMT_D
),
8271 OPC_NEG_D
= FOP(7, FMT_D
),
8272 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8273 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8274 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8275 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8276 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8277 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8278 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8279 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8280 OPC_SEL_D
= FOP(16, FMT_D
),
8281 OPC_MOVCF_D
= FOP(17, FMT_D
),
8282 OPC_MOVZ_D
= FOP(18, FMT_D
),
8283 OPC_MOVN_D
= FOP(19, FMT_D
),
8284 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8285 OPC_RECIP_D
= FOP(21, FMT_D
),
8286 OPC_RSQRT_D
= FOP(22, FMT_D
),
8287 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8288 OPC_MADDF_D
= FOP(24, FMT_D
),
8289 OPC_MSUBF_D
= FOP(25, FMT_D
),
8290 OPC_RINT_D
= FOP(26, FMT_D
),
8291 OPC_CLASS_D
= FOP(27, FMT_D
),
8292 OPC_MIN_D
= FOP(28, FMT_D
),
8293 OPC_RECIP2_D
= FOP(28, FMT_D
),
8294 OPC_MINA_D
= FOP(29, FMT_D
),
8295 OPC_RECIP1_D
= FOP(29, FMT_D
),
8296 OPC_MAX_D
= FOP(30, FMT_D
),
8297 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8298 OPC_MAXA_D
= FOP(31, FMT_D
),
8299 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8300 OPC_CVT_S_D
= FOP(32, FMT_D
),
8301 OPC_CVT_W_D
= FOP(36, FMT_D
),
8302 OPC_CVT_L_D
= FOP(37, FMT_D
),
8303 OPC_CMP_F_D
= FOP (48, FMT_D
),
8304 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8305 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8306 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8307 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8308 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8309 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8310 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8311 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8312 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8313 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8314 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8315 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8316 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8317 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8318 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8320 OPC_CVT_S_W
= FOP(32, FMT_W
),
8321 OPC_CVT_D_W
= FOP(33, FMT_W
),
8322 OPC_CVT_S_L
= FOP(32, FMT_L
),
8323 OPC_CVT_D_L
= FOP(33, FMT_L
),
8324 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8326 OPC_ADD_PS
= FOP(0, FMT_PS
),
8327 OPC_SUB_PS
= FOP(1, FMT_PS
),
8328 OPC_MUL_PS
= FOP(2, FMT_PS
),
8329 OPC_DIV_PS
= FOP(3, FMT_PS
),
8330 OPC_ABS_PS
= FOP(5, FMT_PS
),
8331 OPC_MOV_PS
= FOP(6, FMT_PS
),
8332 OPC_NEG_PS
= FOP(7, FMT_PS
),
8333 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8334 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8335 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8336 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8337 OPC_MULR_PS
= FOP(26, FMT_PS
),
8338 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8339 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8340 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8341 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8343 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8344 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8345 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8346 OPC_PLL_PS
= FOP(44, FMT_PS
),
8347 OPC_PLU_PS
= FOP(45, FMT_PS
),
8348 OPC_PUL_PS
= FOP(46, FMT_PS
),
8349 OPC_PUU_PS
= FOP(47, FMT_PS
),
8350 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8351 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8352 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8353 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8354 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8355 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8356 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8357 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8358 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8359 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8360 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8361 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8362 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8363 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8364 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8365 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8369 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8370 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8371 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8372 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8373 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8374 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8375 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8376 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8377 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8378 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8379 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8380 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8381 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8382 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8383 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8384 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8385 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8386 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8387 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8388 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8389 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8390 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8392 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8393 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8394 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8395 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8396 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8397 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8398 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8399 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8400 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8401 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8402 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8403 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8404 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8405 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8406 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8407 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8408 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8409 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8410 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8411 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8412 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8413 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8415 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8417 TCGv t0
= tcg_temp_new();
8422 TCGv_i32 fp0
= tcg_temp_new_i32();
8424 gen_load_fpr32(ctx
, fp0
, fs
);
8425 tcg_gen_ext_i32_tl(t0
, fp0
);
8426 tcg_temp_free_i32(fp0
);
8428 gen_store_gpr(t0
, rt
);
8431 gen_load_gpr(t0
, rt
);
8433 TCGv_i32 fp0
= tcg_temp_new_i32();
8435 tcg_gen_trunc_tl_i32(fp0
, t0
);
8436 gen_store_fpr32(ctx
, fp0
, fs
);
8437 tcg_temp_free_i32(fp0
);
8441 gen_helper_1e0i(cfc1
, t0
, fs
);
8442 gen_store_gpr(t0
, rt
);
8445 gen_load_gpr(t0
, rt
);
8446 save_cpu_state(ctx
, 0);
8448 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8450 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8451 tcg_temp_free_i32(fs_tmp
);
8453 /* Stop translation as we may have changed hflags */
8454 ctx
->bstate
= BS_STOP
;
8456 #if defined(TARGET_MIPS64)
8458 gen_load_fpr64(ctx
, t0
, fs
);
8459 gen_store_gpr(t0
, rt
);
8462 gen_load_gpr(t0
, rt
);
8463 gen_store_fpr64(ctx
, t0
, fs
);
8468 TCGv_i32 fp0
= tcg_temp_new_i32();
8470 gen_load_fpr32h(ctx
, fp0
, fs
);
8471 tcg_gen_ext_i32_tl(t0
, fp0
);
8472 tcg_temp_free_i32(fp0
);
8474 gen_store_gpr(t0
, rt
);
8477 gen_load_gpr(t0
, rt
);
8479 TCGv_i32 fp0
= tcg_temp_new_i32();
8481 tcg_gen_trunc_tl_i32(fp0
, t0
);
8482 gen_store_fpr32h(ctx
, fp0
, fs
);
8483 tcg_temp_free_i32(fp0
);
8487 MIPS_INVAL("cp1 move");
8488 generate_exception_end(ctx
, EXCP_RI
);
8496 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
8512 l1
= gen_new_label();
8513 t0
= tcg_temp_new_i32();
8514 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8515 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8516 tcg_temp_free_i32(t0
);
8518 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
8520 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
8525 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
8529 TCGv_i32 t0
= tcg_temp_new_i32();
8530 TCGLabel
*l1
= gen_new_label();
8537 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8538 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8539 gen_load_fpr32(ctx
, t0
, fs
);
8540 gen_store_fpr32(ctx
, t0
, fd
);
8542 tcg_temp_free_i32(t0
);
8545 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
8548 TCGv_i32 t0
= tcg_temp_new_i32();
8550 TCGLabel
*l1
= gen_new_label();
8557 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8558 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8559 tcg_temp_free_i32(t0
);
8560 fp0
= tcg_temp_new_i64();
8561 gen_load_fpr64(ctx
, fp0
, fs
);
8562 gen_store_fpr64(ctx
, fp0
, fd
);
8563 tcg_temp_free_i64(fp0
);
8567 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
8571 TCGv_i32 t0
= tcg_temp_new_i32();
8572 TCGLabel
*l1
= gen_new_label();
8573 TCGLabel
*l2
= gen_new_label();
8580 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
8581 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
8582 gen_load_fpr32(ctx
, t0
, fs
);
8583 gen_store_fpr32(ctx
, t0
, fd
);
8586 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+1));
8587 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
8588 gen_load_fpr32h(ctx
, t0
, fs
);
8589 gen_store_fpr32h(ctx
, t0
, fd
);
8590 tcg_temp_free_i32(t0
);
8594 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8597 TCGv_i32 t1
= tcg_const_i32(0);
8598 TCGv_i32 fp0
= tcg_temp_new_i32();
8599 TCGv_i32 fp1
= tcg_temp_new_i32();
8600 TCGv_i32 fp2
= tcg_temp_new_i32();
8601 gen_load_fpr32(ctx
, fp0
, fd
);
8602 gen_load_fpr32(ctx
, fp1
, ft
);
8603 gen_load_fpr32(ctx
, fp2
, fs
);
8607 tcg_gen_andi_i32(fp0
, fp0
, 1);
8608 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8611 tcg_gen_andi_i32(fp1
, fp1
, 1);
8612 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8615 tcg_gen_andi_i32(fp1
, fp1
, 1);
8616 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8619 MIPS_INVAL("gen_sel_s");
8620 generate_exception_end(ctx
, EXCP_RI
);
8624 gen_store_fpr32(ctx
, fp0
, fd
);
8625 tcg_temp_free_i32(fp2
);
8626 tcg_temp_free_i32(fp1
);
8627 tcg_temp_free_i32(fp0
);
8628 tcg_temp_free_i32(t1
);
8631 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
8634 TCGv_i64 t1
= tcg_const_i64(0);
8635 TCGv_i64 fp0
= tcg_temp_new_i64();
8636 TCGv_i64 fp1
= tcg_temp_new_i64();
8637 TCGv_i64 fp2
= tcg_temp_new_i64();
8638 gen_load_fpr64(ctx
, fp0
, fd
);
8639 gen_load_fpr64(ctx
, fp1
, ft
);
8640 gen_load_fpr64(ctx
, fp2
, fs
);
8644 tcg_gen_andi_i64(fp0
, fp0
, 1);
8645 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
8648 tcg_gen_andi_i64(fp1
, fp1
, 1);
8649 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
8652 tcg_gen_andi_i64(fp1
, fp1
, 1);
8653 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
8656 MIPS_INVAL("gen_sel_d");
8657 generate_exception_end(ctx
, EXCP_RI
);
8661 gen_store_fpr64(ctx
, fp0
, fd
);
8662 tcg_temp_free_i64(fp2
);
8663 tcg_temp_free_i64(fp1
);
8664 tcg_temp_free_i64(fp0
);
8665 tcg_temp_free_i64(t1
);
8668 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8669 int ft
, int fs
, int fd
, int cc
)
8671 uint32_t func
= ctx
->opcode
& 0x3f;
8675 TCGv_i32 fp0
= tcg_temp_new_i32();
8676 TCGv_i32 fp1
= tcg_temp_new_i32();
8678 gen_load_fpr32(ctx
, fp0
, fs
);
8679 gen_load_fpr32(ctx
, fp1
, ft
);
8680 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8681 tcg_temp_free_i32(fp1
);
8682 gen_store_fpr32(ctx
, fp0
, fd
);
8683 tcg_temp_free_i32(fp0
);
8688 TCGv_i32 fp0
= tcg_temp_new_i32();
8689 TCGv_i32 fp1
= tcg_temp_new_i32();
8691 gen_load_fpr32(ctx
, fp0
, fs
);
8692 gen_load_fpr32(ctx
, fp1
, ft
);
8693 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8694 tcg_temp_free_i32(fp1
);
8695 gen_store_fpr32(ctx
, fp0
, fd
);
8696 tcg_temp_free_i32(fp0
);
8701 TCGv_i32 fp0
= tcg_temp_new_i32();
8702 TCGv_i32 fp1
= tcg_temp_new_i32();
8704 gen_load_fpr32(ctx
, fp0
, fs
);
8705 gen_load_fpr32(ctx
, fp1
, ft
);
8706 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8707 tcg_temp_free_i32(fp1
);
8708 gen_store_fpr32(ctx
, fp0
, fd
);
8709 tcg_temp_free_i32(fp0
);
8714 TCGv_i32 fp0
= tcg_temp_new_i32();
8715 TCGv_i32 fp1
= tcg_temp_new_i32();
8717 gen_load_fpr32(ctx
, fp0
, fs
);
8718 gen_load_fpr32(ctx
, fp1
, ft
);
8719 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8720 tcg_temp_free_i32(fp1
);
8721 gen_store_fpr32(ctx
, fp0
, fd
);
8722 tcg_temp_free_i32(fp0
);
8727 TCGv_i32 fp0
= tcg_temp_new_i32();
8729 gen_load_fpr32(ctx
, fp0
, fs
);
8730 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8731 gen_store_fpr32(ctx
, fp0
, fd
);
8732 tcg_temp_free_i32(fp0
);
8737 TCGv_i32 fp0
= tcg_temp_new_i32();
8739 gen_load_fpr32(ctx
, fp0
, fs
);
8740 gen_helper_float_abs_s(fp0
, fp0
);
8741 gen_store_fpr32(ctx
, fp0
, fd
);
8742 tcg_temp_free_i32(fp0
);
8747 TCGv_i32 fp0
= tcg_temp_new_i32();
8749 gen_load_fpr32(ctx
, fp0
, fs
);
8750 gen_store_fpr32(ctx
, fp0
, fd
);
8751 tcg_temp_free_i32(fp0
);
8756 TCGv_i32 fp0
= tcg_temp_new_i32();
8758 gen_load_fpr32(ctx
, fp0
, fs
);
8759 gen_helper_float_chs_s(fp0
, fp0
);
8760 gen_store_fpr32(ctx
, fp0
, fd
);
8761 tcg_temp_free_i32(fp0
);
8765 check_cp1_64bitmode(ctx
);
8767 TCGv_i32 fp32
= tcg_temp_new_i32();
8768 TCGv_i64 fp64
= tcg_temp_new_i64();
8770 gen_load_fpr32(ctx
, fp32
, fs
);
8771 gen_helper_float_roundl_s(fp64
, cpu_env
, fp32
);
8772 tcg_temp_free_i32(fp32
);
8773 gen_store_fpr64(ctx
, fp64
, fd
);
8774 tcg_temp_free_i64(fp64
);
8778 check_cp1_64bitmode(ctx
);
8780 TCGv_i32 fp32
= tcg_temp_new_i32();
8781 TCGv_i64 fp64
= tcg_temp_new_i64();
8783 gen_load_fpr32(ctx
, fp32
, fs
);
8784 gen_helper_float_truncl_s(fp64
, cpu_env
, fp32
);
8785 tcg_temp_free_i32(fp32
);
8786 gen_store_fpr64(ctx
, fp64
, fd
);
8787 tcg_temp_free_i64(fp64
);
8791 check_cp1_64bitmode(ctx
);
8793 TCGv_i32 fp32
= tcg_temp_new_i32();
8794 TCGv_i64 fp64
= tcg_temp_new_i64();
8796 gen_load_fpr32(ctx
, fp32
, fs
);
8797 gen_helper_float_ceill_s(fp64
, cpu_env
, fp32
);
8798 tcg_temp_free_i32(fp32
);
8799 gen_store_fpr64(ctx
, fp64
, fd
);
8800 tcg_temp_free_i64(fp64
);
8804 check_cp1_64bitmode(ctx
);
8806 TCGv_i32 fp32
= tcg_temp_new_i32();
8807 TCGv_i64 fp64
= tcg_temp_new_i64();
8809 gen_load_fpr32(ctx
, fp32
, fs
);
8810 gen_helper_float_floorl_s(fp64
, cpu_env
, fp32
);
8811 tcg_temp_free_i32(fp32
);
8812 gen_store_fpr64(ctx
, fp64
, fd
);
8813 tcg_temp_free_i64(fp64
);
8818 TCGv_i32 fp0
= tcg_temp_new_i32();
8820 gen_load_fpr32(ctx
, fp0
, fs
);
8821 gen_helper_float_roundw_s(fp0
, cpu_env
, fp0
);
8822 gen_store_fpr32(ctx
, fp0
, fd
);
8823 tcg_temp_free_i32(fp0
);
8828 TCGv_i32 fp0
= tcg_temp_new_i32();
8830 gen_load_fpr32(ctx
, fp0
, fs
);
8831 gen_helper_float_truncw_s(fp0
, cpu_env
, fp0
);
8832 gen_store_fpr32(ctx
, fp0
, fd
);
8833 tcg_temp_free_i32(fp0
);
8838 TCGv_i32 fp0
= tcg_temp_new_i32();
8840 gen_load_fpr32(ctx
, fp0
, fs
);
8841 gen_helper_float_ceilw_s(fp0
, cpu_env
, fp0
);
8842 gen_store_fpr32(ctx
, fp0
, fd
);
8843 tcg_temp_free_i32(fp0
);
8848 TCGv_i32 fp0
= tcg_temp_new_i32();
8850 gen_load_fpr32(ctx
, fp0
, fs
);
8851 gen_helper_float_floorw_s(fp0
, cpu_env
, fp0
);
8852 gen_store_fpr32(ctx
, fp0
, fd
);
8853 tcg_temp_free_i32(fp0
);
8857 check_insn(ctx
, ISA_MIPS32R6
);
8858 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8861 check_insn(ctx
, ISA_MIPS32R6
);
8862 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8865 check_insn(ctx
, ISA_MIPS32R6
);
8866 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
8869 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8870 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
8873 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8875 TCGLabel
*l1
= gen_new_label();
8879 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
8881 fp0
= tcg_temp_new_i32();
8882 gen_load_fpr32(ctx
, fp0
, fs
);
8883 gen_store_fpr32(ctx
, fp0
, fd
);
8884 tcg_temp_free_i32(fp0
);
8889 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
8891 TCGLabel
*l1
= gen_new_label();
8895 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
8896 fp0
= tcg_temp_new_i32();
8897 gen_load_fpr32(ctx
, fp0
, fs
);
8898 gen_store_fpr32(ctx
, fp0
, fd
);
8899 tcg_temp_free_i32(fp0
);
8906 TCGv_i32 fp0
= tcg_temp_new_i32();
8908 gen_load_fpr32(ctx
, fp0
, fs
);
8909 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
8910 gen_store_fpr32(ctx
, fp0
, fd
);
8911 tcg_temp_free_i32(fp0
);
8916 TCGv_i32 fp0
= tcg_temp_new_i32();
8918 gen_load_fpr32(ctx
, fp0
, fs
);
8919 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
8920 gen_store_fpr32(ctx
, fp0
, fd
);
8921 tcg_temp_free_i32(fp0
);
8925 check_insn(ctx
, ISA_MIPS32R6
);
8927 TCGv_i32 fp0
= tcg_temp_new_i32();
8928 TCGv_i32 fp1
= tcg_temp_new_i32();
8929 TCGv_i32 fp2
= tcg_temp_new_i32();
8930 gen_load_fpr32(ctx
, fp0
, fs
);
8931 gen_load_fpr32(ctx
, fp1
, ft
);
8932 gen_load_fpr32(ctx
, fp2
, fd
);
8933 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8934 gen_store_fpr32(ctx
, fp2
, fd
);
8935 tcg_temp_free_i32(fp2
);
8936 tcg_temp_free_i32(fp1
);
8937 tcg_temp_free_i32(fp0
);
8941 check_insn(ctx
, ISA_MIPS32R6
);
8943 TCGv_i32 fp0
= tcg_temp_new_i32();
8944 TCGv_i32 fp1
= tcg_temp_new_i32();
8945 TCGv_i32 fp2
= tcg_temp_new_i32();
8946 gen_load_fpr32(ctx
, fp0
, fs
);
8947 gen_load_fpr32(ctx
, fp1
, ft
);
8948 gen_load_fpr32(ctx
, fp2
, fd
);
8949 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
8950 gen_store_fpr32(ctx
, fp2
, fd
);
8951 tcg_temp_free_i32(fp2
);
8952 tcg_temp_free_i32(fp1
);
8953 tcg_temp_free_i32(fp0
);
8957 check_insn(ctx
, ISA_MIPS32R6
);
8959 TCGv_i32 fp0
= tcg_temp_new_i32();
8960 gen_load_fpr32(ctx
, fp0
, fs
);
8961 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
8962 gen_store_fpr32(ctx
, fp0
, fd
);
8963 tcg_temp_free_i32(fp0
);
8967 check_insn(ctx
, ISA_MIPS32R6
);
8969 TCGv_i32 fp0
= tcg_temp_new_i32();
8970 gen_load_fpr32(ctx
, fp0
, fs
);
8971 gen_helper_float_class_s(fp0
, fp0
);
8972 gen_store_fpr32(ctx
, fp0
, fd
);
8973 tcg_temp_free_i32(fp0
);
8976 case OPC_MIN_S
: /* OPC_RECIP2_S */
8977 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
8979 TCGv_i32 fp0
= tcg_temp_new_i32();
8980 TCGv_i32 fp1
= tcg_temp_new_i32();
8981 TCGv_i32 fp2
= tcg_temp_new_i32();
8982 gen_load_fpr32(ctx
, fp0
, fs
);
8983 gen_load_fpr32(ctx
, fp1
, ft
);
8984 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
8985 gen_store_fpr32(ctx
, fp2
, fd
);
8986 tcg_temp_free_i32(fp2
);
8987 tcg_temp_free_i32(fp1
);
8988 tcg_temp_free_i32(fp0
);
8991 check_cp1_64bitmode(ctx
);
8993 TCGv_i32 fp0
= tcg_temp_new_i32();
8994 TCGv_i32 fp1
= tcg_temp_new_i32();
8996 gen_load_fpr32(ctx
, fp0
, fs
);
8997 gen_load_fpr32(ctx
, fp1
, ft
);
8998 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
8999 tcg_temp_free_i32(fp1
);
9000 gen_store_fpr32(ctx
, fp0
, fd
);
9001 tcg_temp_free_i32(fp0
);
9005 case OPC_MINA_S
: /* OPC_RECIP1_S */
9006 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9008 TCGv_i32 fp0
= tcg_temp_new_i32();
9009 TCGv_i32 fp1
= tcg_temp_new_i32();
9010 TCGv_i32 fp2
= tcg_temp_new_i32();
9011 gen_load_fpr32(ctx
, fp0
, fs
);
9012 gen_load_fpr32(ctx
, fp1
, ft
);
9013 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9014 gen_store_fpr32(ctx
, fp2
, fd
);
9015 tcg_temp_free_i32(fp2
);
9016 tcg_temp_free_i32(fp1
);
9017 tcg_temp_free_i32(fp0
);
9020 check_cp1_64bitmode(ctx
);
9022 TCGv_i32 fp0
= tcg_temp_new_i32();
9024 gen_load_fpr32(ctx
, fp0
, fs
);
9025 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9026 gen_store_fpr32(ctx
, fp0
, fd
);
9027 tcg_temp_free_i32(fp0
);
9031 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9032 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9034 TCGv_i32 fp0
= tcg_temp_new_i32();
9035 TCGv_i32 fp1
= tcg_temp_new_i32();
9036 gen_load_fpr32(ctx
, fp0
, fs
);
9037 gen_load_fpr32(ctx
, fp1
, ft
);
9038 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9039 gen_store_fpr32(ctx
, fp1
, fd
);
9040 tcg_temp_free_i32(fp1
);
9041 tcg_temp_free_i32(fp0
);
9044 check_cp1_64bitmode(ctx
);
9046 TCGv_i32 fp0
= tcg_temp_new_i32();
9048 gen_load_fpr32(ctx
, fp0
, fs
);
9049 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9050 gen_store_fpr32(ctx
, fp0
, fd
);
9051 tcg_temp_free_i32(fp0
);
9055 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9056 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9058 TCGv_i32 fp0
= tcg_temp_new_i32();
9059 TCGv_i32 fp1
= tcg_temp_new_i32();
9060 gen_load_fpr32(ctx
, fp0
, fs
);
9061 gen_load_fpr32(ctx
, fp1
, ft
);
9062 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9063 gen_store_fpr32(ctx
, fp1
, fd
);
9064 tcg_temp_free_i32(fp1
);
9065 tcg_temp_free_i32(fp0
);
9068 check_cp1_64bitmode(ctx
);
9070 TCGv_i32 fp0
= tcg_temp_new_i32();
9071 TCGv_i32 fp1
= tcg_temp_new_i32();
9073 gen_load_fpr32(ctx
, fp0
, fs
);
9074 gen_load_fpr32(ctx
, fp1
, ft
);
9075 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9076 tcg_temp_free_i32(fp1
);
9077 gen_store_fpr32(ctx
, fp0
, fd
);
9078 tcg_temp_free_i32(fp0
);
9083 check_cp1_registers(ctx
, fd
);
9085 TCGv_i32 fp32
= tcg_temp_new_i32();
9086 TCGv_i64 fp64
= tcg_temp_new_i64();
9088 gen_load_fpr32(ctx
, fp32
, fs
);
9089 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9090 tcg_temp_free_i32(fp32
);
9091 gen_store_fpr64(ctx
, fp64
, fd
);
9092 tcg_temp_free_i64(fp64
);
9097 TCGv_i32 fp0
= tcg_temp_new_i32();
9099 gen_load_fpr32(ctx
, fp0
, fs
);
9100 gen_helper_float_cvtw_s(fp0
, cpu_env
, fp0
);
9101 gen_store_fpr32(ctx
, fp0
, fd
);
9102 tcg_temp_free_i32(fp0
);
9106 check_cp1_64bitmode(ctx
);
9108 TCGv_i32 fp32
= tcg_temp_new_i32();
9109 TCGv_i64 fp64
= tcg_temp_new_i64();
9111 gen_load_fpr32(ctx
, fp32
, fs
);
9112 gen_helper_float_cvtl_s(fp64
, cpu_env
, fp32
);
9113 tcg_temp_free_i32(fp32
);
9114 gen_store_fpr64(ctx
, fp64
, fd
);
9115 tcg_temp_free_i64(fp64
);
9121 TCGv_i64 fp64
= tcg_temp_new_i64();
9122 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9123 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9125 gen_load_fpr32(ctx
, fp32_0
, fs
);
9126 gen_load_fpr32(ctx
, fp32_1
, ft
);
9127 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9128 tcg_temp_free_i32(fp32_1
);
9129 tcg_temp_free_i32(fp32_0
);
9130 gen_store_fpr64(ctx
, fp64
, fd
);
9131 tcg_temp_free_i64(fp64
);
9143 case OPC_CMP_NGLE_S
:
9150 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9151 if (ctx
->opcode
& (1 << 6)) {
9152 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9154 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9158 check_cp1_registers(ctx
, fs
| ft
| fd
);
9160 TCGv_i64 fp0
= tcg_temp_new_i64();
9161 TCGv_i64 fp1
= tcg_temp_new_i64();
9163 gen_load_fpr64(ctx
, fp0
, fs
);
9164 gen_load_fpr64(ctx
, fp1
, ft
);
9165 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9166 tcg_temp_free_i64(fp1
);
9167 gen_store_fpr64(ctx
, fp0
, fd
);
9168 tcg_temp_free_i64(fp0
);
9172 check_cp1_registers(ctx
, fs
| ft
| fd
);
9174 TCGv_i64 fp0
= tcg_temp_new_i64();
9175 TCGv_i64 fp1
= tcg_temp_new_i64();
9177 gen_load_fpr64(ctx
, fp0
, fs
);
9178 gen_load_fpr64(ctx
, fp1
, ft
);
9179 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9180 tcg_temp_free_i64(fp1
);
9181 gen_store_fpr64(ctx
, fp0
, fd
);
9182 tcg_temp_free_i64(fp0
);
9186 check_cp1_registers(ctx
, fs
| ft
| fd
);
9188 TCGv_i64 fp0
= tcg_temp_new_i64();
9189 TCGv_i64 fp1
= tcg_temp_new_i64();
9191 gen_load_fpr64(ctx
, fp0
, fs
);
9192 gen_load_fpr64(ctx
, fp1
, ft
);
9193 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9194 tcg_temp_free_i64(fp1
);
9195 gen_store_fpr64(ctx
, fp0
, fd
);
9196 tcg_temp_free_i64(fp0
);
9200 check_cp1_registers(ctx
, fs
| ft
| fd
);
9202 TCGv_i64 fp0
= tcg_temp_new_i64();
9203 TCGv_i64 fp1
= tcg_temp_new_i64();
9205 gen_load_fpr64(ctx
, fp0
, fs
);
9206 gen_load_fpr64(ctx
, fp1
, ft
);
9207 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9208 tcg_temp_free_i64(fp1
);
9209 gen_store_fpr64(ctx
, fp0
, fd
);
9210 tcg_temp_free_i64(fp0
);
9214 check_cp1_registers(ctx
, fs
| fd
);
9216 TCGv_i64 fp0
= tcg_temp_new_i64();
9218 gen_load_fpr64(ctx
, fp0
, fs
);
9219 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9220 gen_store_fpr64(ctx
, fp0
, fd
);
9221 tcg_temp_free_i64(fp0
);
9225 check_cp1_registers(ctx
, fs
| fd
);
9227 TCGv_i64 fp0
= tcg_temp_new_i64();
9229 gen_load_fpr64(ctx
, fp0
, fs
);
9230 gen_helper_float_abs_d(fp0
, fp0
);
9231 gen_store_fpr64(ctx
, fp0
, fd
);
9232 tcg_temp_free_i64(fp0
);
9236 check_cp1_registers(ctx
, fs
| fd
);
9238 TCGv_i64 fp0
= tcg_temp_new_i64();
9240 gen_load_fpr64(ctx
, fp0
, fs
);
9241 gen_store_fpr64(ctx
, fp0
, fd
);
9242 tcg_temp_free_i64(fp0
);
9246 check_cp1_registers(ctx
, fs
| fd
);
9248 TCGv_i64 fp0
= tcg_temp_new_i64();
9250 gen_load_fpr64(ctx
, fp0
, fs
);
9251 gen_helper_float_chs_d(fp0
, fp0
);
9252 gen_store_fpr64(ctx
, fp0
, fd
);
9253 tcg_temp_free_i64(fp0
);
9257 check_cp1_64bitmode(ctx
);
9259 TCGv_i64 fp0
= tcg_temp_new_i64();
9261 gen_load_fpr64(ctx
, fp0
, fs
);
9262 gen_helper_float_roundl_d(fp0
, cpu_env
, fp0
);
9263 gen_store_fpr64(ctx
, fp0
, fd
);
9264 tcg_temp_free_i64(fp0
);
9268 check_cp1_64bitmode(ctx
);
9270 TCGv_i64 fp0
= tcg_temp_new_i64();
9272 gen_load_fpr64(ctx
, fp0
, fs
);
9273 gen_helper_float_truncl_d(fp0
, cpu_env
, fp0
);
9274 gen_store_fpr64(ctx
, fp0
, fd
);
9275 tcg_temp_free_i64(fp0
);
9279 check_cp1_64bitmode(ctx
);
9281 TCGv_i64 fp0
= tcg_temp_new_i64();
9283 gen_load_fpr64(ctx
, fp0
, fs
);
9284 gen_helper_float_ceill_d(fp0
, cpu_env
, fp0
);
9285 gen_store_fpr64(ctx
, fp0
, fd
);
9286 tcg_temp_free_i64(fp0
);
9290 check_cp1_64bitmode(ctx
);
9292 TCGv_i64 fp0
= tcg_temp_new_i64();
9294 gen_load_fpr64(ctx
, fp0
, fs
);
9295 gen_helper_float_floorl_d(fp0
, cpu_env
, fp0
);
9296 gen_store_fpr64(ctx
, fp0
, fd
);
9297 tcg_temp_free_i64(fp0
);
9301 check_cp1_registers(ctx
, fs
);
9303 TCGv_i32 fp32
= tcg_temp_new_i32();
9304 TCGv_i64 fp64
= tcg_temp_new_i64();
9306 gen_load_fpr64(ctx
, fp64
, fs
);
9307 gen_helper_float_roundw_d(fp32
, cpu_env
, fp64
);
9308 tcg_temp_free_i64(fp64
);
9309 gen_store_fpr32(ctx
, fp32
, fd
);
9310 tcg_temp_free_i32(fp32
);
9314 check_cp1_registers(ctx
, fs
);
9316 TCGv_i32 fp32
= tcg_temp_new_i32();
9317 TCGv_i64 fp64
= tcg_temp_new_i64();
9319 gen_load_fpr64(ctx
, fp64
, fs
);
9320 gen_helper_float_truncw_d(fp32
, cpu_env
, fp64
);
9321 tcg_temp_free_i64(fp64
);
9322 gen_store_fpr32(ctx
, fp32
, fd
);
9323 tcg_temp_free_i32(fp32
);
9327 check_cp1_registers(ctx
, fs
);
9329 TCGv_i32 fp32
= tcg_temp_new_i32();
9330 TCGv_i64 fp64
= tcg_temp_new_i64();
9332 gen_load_fpr64(ctx
, fp64
, fs
);
9333 gen_helper_float_ceilw_d(fp32
, cpu_env
, fp64
);
9334 tcg_temp_free_i64(fp64
);
9335 gen_store_fpr32(ctx
, fp32
, fd
);
9336 tcg_temp_free_i32(fp32
);
9340 check_cp1_registers(ctx
, fs
);
9342 TCGv_i32 fp32
= tcg_temp_new_i32();
9343 TCGv_i64 fp64
= tcg_temp_new_i64();
9345 gen_load_fpr64(ctx
, fp64
, fs
);
9346 gen_helper_float_floorw_d(fp32
, cpu_env
, fp64
);
9347 tcg_temp_free_i64(fp64
);
9348 gen_store_fpr32(ctx
, fp32
, fd
);
9349 tcg_temp_free_i32(fp32
);
9353 check_insn(ctx
, ISA_MIPS32R6
);
9354 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9357 check_insn(ctx
, ISA_MIPS32R6
);
9358 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9361 check_insn(ctx
, ISA_MIPS32R6
);
9362 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9365 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9366 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9369 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9371 TCGLabel
*l1
= gen_new_label();
9375 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9377 fp0
= tcg_temp_new_i64();
9378 gen_load_fpr64(ctx
, fp0
, fs
);
9379 gen_store_fpr64(ctx
, fp0
, fd
);
9380 tcg_temp_free_i64(fp0
);
9385 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9387 TCGLabel
*l1
= gen_new_label();
9391 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9392 fp0
= tcg_temp_new_i64();
9393 gen_load_fpr64(ctx
, fp0
, fs
);
9394 gen_store_fpr64(ctx
, fp0
, fd
);
9395 tcg_temp_free_i64(fp0
);
9401 check_cp1_registers(ctx
, fs
| fd
);
9403 TCGv_i64 fp0
= tcg_temp_new_i64();
9405 gen_load_fpr64(ctx
, fp0
, fs
);
9406 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9407 gen_store_fpr64(ctx
, fp0
, fd
);
9408 tcg_temp_free_i64(fp0
);
9412 check_cp1_registers(ctx
, fs
| fd
);
9414 TCGv_i64 fp0
= tcg_temp_new_i64();
9416 gen_load_fpr64(ctx
, fp0
, fs
);
9417 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9418 gen_store_fpr64(ctx
, fp0
, fd
);
9419 tcg_temp_free_i64(fp0
);
9423 check_insn(ctx
, ISA_MIPS32R6
);
9425 TCGv_i64 fp0
= tcg_temp_new_i64();
9426 TCGv_i64 fp1
= tcg_temp_new_i64();
9427 TCGv_i64 fp2
= tcg_temp_new_i64();
9428 gen_load_fpr64(ctx
, fp0
, fs
);
9429 gen_load_fpr64(ctx
, fp1
, ft
);
9430 gen_load_fpr64(ctx
, fp2
, fd
);
9431 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9432 gen_store_fpr64(ctx
, fp2
, fd
);
9433 tcg_temp_free_i64(fp2
);
9434 tcg_temp_free_i64(fp1
);
9435 tcg_temp_free_i64(fp0
);
9439 check_insn(ctx
, ISA_MIPS32R6
);
9441 TCGv_i64 fp0
= tcg_temp_new_i64();
9442 TCGv_i64 fp1
= tcg_temp_new_i64();
9443 TCGv_i64 fp2
= tcg_temp_new_i64();
9444 gen_load_fpr64(ctx
, fp0
, fs
);
9445 gen_load_fpr64(ctx
, fp1
, ft
);
9446 gen_load_fpr64(ctx
, fp2
, fd
);
9447 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9448 gen_store_fpr64(ctx
, fp2
, fd
);
9449 tcg_temp_free_i64(fp2
);
9450 tcg_temp_free_i64(fp1
);
9451 tcg_temp_free_i64(fp0
);
9455 check_insn(ctx
, ISA_MIPS32R6
);
9457 TCGv_i64 fp0
= tcg_temp_new_i64();
9458 gen_load_fpr64(ctx
, fp0
, fs
);
9459 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9460 gen_store_fpr64(ctx
, fp0
, fd
);
9461 tcg_temp_free_i64(fp0
);
9465 check_insn(ctx
, ISA_MIPS32R6
);
9467 TCGv_i64 fp0
= tcg_temp_new_i64();
9468 gen_load_fpr64(ctx
, fp0
, fs
);
9469 gen_helper_float_class_d(fp0
, fp0
);
9470 gen_store_fpr64(ctx
, fp0
, fd
);
9471 tcg_temp_free_i64(fp0
);
9474 case OPC_MIN_D
: /* OPC_RECIP2_D */
9475 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9477 TCGv_i64 fp0
= tcg_temp_new_i64();
9478 TCGv_i64 fp1
= tcg_temp_new_i64();
9479 gen_load_fpr64(ctx
, fp0
, fs
);
9480 gen_load_fpr64(ctx
, fp1
, ft
);
9481 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9482 gen_store_fpr64(ctx
, fp1
, fd
);
9483 tcg_temp_free_i64(fp1
);
9484 tcg_temp_free_i64(fp0
);
9487 check_cp1_64bitmode(ctx
);
9489 TCGv_i64 fp0
= tcg_temp_new_i64();
9490 TCGv_i64 fp1
= tcg_temp_new_i64();
9492 gen_load_fpr64(ctx
, fp0
, fs
);
9493 gen_load_fpr64(ctx
, fp1
, ft
);
9494 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9495 tcg_temp_free_i64(fp1
);
9496 gen_store_fpr64(ctx
, fp0
, fd
);
9497 tcg_temp_free_i64(fp0
);
9501 case OPC_MINA_D
: /* OPC_RECIP1_D */
9502 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9504 TCGv_i64 fp0
= tcg_temp_new_i64();
9505 TCGv_i64 fp1
= tcg_temp_new_i64();
9506 gen_load_fpr64(ctx
, fp0
, fs
);
9507 gen_load_fpr64(ctx
, fp1
, ft
);
9508 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9509 gen_store_fpr64(ctx
, fp1
, fd
);
9510 tcg_temp_free_i64(fp1
);
9511 tcg_temp_free_i64(fp0
);
9514 check_cp1_64bitmode(ctx
);
9516 TCGv_i64 fp0
= tcg_temp_new_i64();
9518 gen_load_fpr64(ctx
, fp0
, fs
);
9519 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9520 gen_store_fpr64(ctx
, fp0
, fd
);
9521 tcg_temp_free_i64(fp0
);
9525 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9526 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9528 TCGv_i64 fp0
= tcg_temp_new_i64();
9529 TCGv_i64 fp1
= tcg_temp_new_i64();
9530 gen_load_fpr64(ctx
, fp0
, fs
);
9531 gen_load_fpr64(ctx
, fp1
, ft
);
9532 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9533 gen_store_fpr64(ctx
, fp1
, fd
);
9534 tcg_temp_free_i64(fp1
);
9535 tcg_temp_free_i64(fp0
);
9538 check_cp1_64bitmode(ctx
);
9540 TCGv_i64 fp0
= tcg_temp_new_i64();
9542 gen_load_fpr64(ctx
, fp0
, fs
);
9543 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9544 gen_store_fpr64(ctx
, fp0
, fd
);
9545 tcg_temp_free_i64(fp0
);
9549 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9550 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9552 TCGv_i64 fp0
= tcg_temp_new_i64();
9553 TCGv_i64 fp1
= tcg_temp_new_i64();
9554 gen_load_fpr64(ctx
, fp0
, fs
);
9555 gen_load_fpr64(ctx
, fp1
, ft
);
9556 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9557 gen_store_fpr64(ctx
, fp1
, fd
);
9558 tcg_temp_free_i64(fp1
);
9559 tcg_temp_free_i64(fp0
);
9562 check_cp1_64bitmode(ctx
);
9564 TCGv_i64 fp0
= tcg_temp_new_i64();
9565 TCGv_i64 fp1
= tcg_temp_new_i64();
9567 gen_load_fpr64(ctx
, fp0
, fs
);
9568 gen_load_fpr64(ctx
, fp1
, ft
);
9569 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9570 tcg_temp_free_i64(fp1
);
9571 gen_store_fpr64(ctx
, fp0
, fd
);
9572 tcg_temp_free_i64(fp0
);
9585 case OPC_CMP_NGLE_D
:
9592 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9593 if (ctx
->opcode
& (1 << 6)) {
9594 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9596 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9600 check_cp1_registers(ctx
, fs
);
9602 TCGv_i32 fp32
= tcg_temp_new_i32();
9603 TCGv_i64 fp64
= tcg_temp_new_i64();
9605 gen_load_fpr64(ctx
, fp64
, fs
);
9606 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9607 tcg_temp_free_i64(fp64
);
9608 gen_store_fpr32(ctx
, fp32
, fd
);
9609 tcg_temp_free_i32(fp32
);
9613 check_cp1_registers(ctx
, fs
);
9615 TCGv_i32 fp32
= tcg_temp_new_i32();
9616 TCGv_i64 fp64
= tcg_temp_new_i64();
9618 gen_load_fpr64(ctx
, fp64
, fs
);
9619 gen_helper_float_cvtw_d(fp32
, cpu_env
, fp64
);
9620 tcg_temp_free_i64(fp64
);
9621 gen_store_fpr32(ctx
, fp32
, fd
);
9622 tcg_temp_free_i32(fp32
);
9626 check_cp1_64bitmode(ctx
);
9628 TCGv_i64 fp0
= tcg_temp_new_i64();
9630 gen_load_fpr64(ctx
, fp0
, fs
);
9631 gen_helper_float_cvtl_d(fp0
, cpu_env
, fp0
);
9632 gen_store_fpr64(ctx
, fp0
, fd
);
9633 tcg_temp_free_i64(fp0
);
9638 TCGv_i32 fp0
= tcg_temp_new_i32();
9640 gen_load_fpr32(ctx
, fp0
, fs
);
9641 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9642 gen_store_fpr32(ctx
, fp0
, fd
);
9643 tcg_temp_free_i32(fp0
);
9647 check_cp1_registers(ctx
, fd
);
9649 TCGv_i32 fp32
= tcg_temp_new_i32();
9650 TCGv_i64 fp64
= tcg_temp_new_i64();
9652 gen_load_fpr32(ctx
, fp32
, fs
);
9653 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9654 tcg_temp_free_i32(fp32
);
9655 gen_store_fpr64(ctx
, fp64
, fd
);
9656 tcg_temp_free_i64(fp64
);
9660 check_cp1_64bitmode(ctx
);
9662 TCGv_i32 fp32
= tcg_temp_new_i32();
9663 TCGv_i64 fp64
= tcg_temp_new_i64();
9665 gen_load_fpr64(ctx
, fp64
, fs
);
9666 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9667 tcg_temp_free_i64(fp64
);
9668 gen_store_fpr32(ctx
, fp32
, fd
);
9669 tcg_temp_free_i32(fp32
);
9673 check_cp1_64bitmode(ctx
);
9675 TCGv_i64 fp0
= tcg_temp_new_i64();
9677 gen_load_fpr64(ctx
, fp0
, fs
);
9678 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9679 gen_store_fpr64(ctx
, fp0
, fd
);
9680 tcg_temp_free_i64(fp0
);
9686 TCGv_i64 fp0
= tcg_temp_new_i64();
9688 gen_load_fpr64(ctx
, fp0
, fs
);
9689 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9690 gen_store_fpr64(ctx
, fp0
, fd
);
9691 tcg_temp_free_i64(fp0
);
9697 TCGv_i64 fp0
= tcg_temp_new_i64();
9698 TCGv_i64 fp1
= tcg_temp_new_i64();
9700 gen_load_fpr64(ctx
, fp0
, fs
);
9701 gen_load_fpr64(ctx
, fp1
, ft
);
9702 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9703 tcg_temp_free_i64(fp1
);
9704 gen_store_fpr64(ctx
, fp0
, fd
);
9705 tcg_temp_free_i64(fp0
);
9711 TCGv_i64 fp0
= tcg_temp_new_i64();
9712 TCGv_i64 fp1
= tcg_temp_new_i64();
9714 gen_load_fpr64(ctx
, fp0
, fs
);
9715 gen_load_fpr64(ctx
, fp1
, ft
);
9716 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9717 tcg_temp_free_i64(fp1
);
9718 gen_store_fpr64(ctx
, fp0
, fd
);
9719 tcg_temp_free_i64(fp0
);
9725 TCGv_i64 fp0
= tcg_temp_new_i64();
9726 TCGv_i64 fp1
= tcg_temp_new_i64();
9728 gen_load_fpr64(ctx
, fp0
, fs
);
9729 gen_load_fpr64(ctx
, fp1
, ft
);
9730 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9731 tcg_temp_free_i64(fp1
);
9732 gen_store_fpr64(ctx
, fp0
, fd
);
9733 tcg_temp_free_i64(fp0
);
9739 TCGv_i64 fp0
= tcg_temp_new_i64();
9741 gen_load_fpr64(ctx
, fp0
, fs
);
9742 gen_helper_float_abs_ps(fp0
, fp0
);
9743 gen_store_fpr64(ctx
, fp0
, fd
);
9744 tcg_temp_free_i64(fp0
);
9750 TCGv_i64 fp0
= tcg_temp_new_i64();
9752 gen_load_fpr64(ctx
, fp0
, fs
);
9753 gen_store_fpr64(ctx
, fp0
, fd
);
9754 tcg_temp_free_i64(fp0
);
9760 TCGv_i64 fp0
= tcg_temp_new_i64();
9762 gen_load_fpr64(ctx
, fp0
, fs
);
9763 gen_helper_float_chs_ps(fp0
, fp0
);
9764 gen_store_fpr64(ctx
, fp0
, fd
);
9765 tcg_temp_free_i64(fp0
);
9770 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9775 TCGLabel
*l1
= gen_new_label();
9779 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9780 fp0
= tcg_temp_new_i64();
9781 gen_load_fpr64(ctx
, fp0
, fs
);
9782 gen_store_fpr64(ctx
, fp0
, fd
);
9783 tcg_temp_free_i64(fp0
);
9790 TCGLabel
*l1
= gen_new_label();
9794 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9795 fp0
= tcg_temp_new_i64();
9796 gen_load_fpr64(ctx
, fp0
, fs
);
9797 gen_store_fpr64(ctx
, fp0
, fd
);
9798 tcg_temp_free_i64(fp0
);
9806 TCGv_i64 fp0
= tcg_temp_new_i64();
9807 TCGv_i64 fp1
= tcg_temp_new_i64();
9809 gen_load_fpr64(ctx
, fp0
, ft
);
9810 gen_load_fpr64(ctx
, fp1
, fs
);
9811 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
9812 tcg_temp_free_i64(fp1
);
9813 gen_store_fpr64(ctx
, fp0
, fd
);
9814 tcg_temp_free_i64(fp0
);
9820 TCGv_i64 fp0
= tcg_temp_new_i64();
9821 TCGv_i64 fp1
= tcg_temp_new_i64();
9823 gen_load_fpr64(ctx
, fp0
, ft
);
9824 gen_load_fpr64(ctx
, fp1
, fs
);
9825 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
9826 tcg_temp_free_i64(fp1
);
9827 gen_store_fpr64(ctx
, fp0
, fd
);
9828 tcg_temp_free_i64(fp0
);
9834 TCGv_i64 fp0
= tcg_temp_new_i64();
9835 TCGv_i64 fp1
= tcg_temp_new_i64();
9837 gen_load_fpr64(ctx
, fp0
, fs
);
9838 gen_load_fpr64(ctx
, fp1
, ft
);
9839 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
9840 tcg_temp_free_i64(fp1
);
9841 gen_store_fpr64(ctx
, fp0
, fd
);
9842 tcg_temp_free_i64(fp0
);
9848 TCGv_i64 fp0
= tcg_temp_new_i64();
9850 gen_load_fpr64(ctx
, fp0
, fs
);
9851 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
9852 gen_store_fpr64(ctx
, fp0
, fd
);
9853 tcg_temp_free_i64(fp0
);
9859 TCGv_i64 fp0
= tcg_temp_new_i64();
9861 gen_load_fpr64(ctx
, fp0
, fs
);
9862 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
9863 gen_store_fpr64(ctx
, fp0
, fd
);
9864 tcg_temp_free_i64(fp0
);
9870 TCGv_i64 fp0
= tcg_temp_new_i64();
9871 TCGv_i64 fp1
= tcg_temp_new_i64();
9873 gen_load_fpr64(ctx
, fp0
, fs
);
9874 gen_load_fpr64(ctx
, fp1
, ft
);
9875 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
9876 tcg_temp_free_i64(fp1
);
9877 gen_store_fpr64(ctx
, fp0
, fd
);
9878 tcg_temp_free_i64(fp0
);
9882 check_cp1_64bitmode(ctx
);
9884 TCGv_i32 fp0
= tcg_temp_new_i32();
9886 gen_load_fpr32h(ctx
, fp0
, fs
);
9887 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
9888 gen_store_fpr32(ctx
, fp0
, fd
);
9889 tcg_temp_free_i32(fp0
);
9895 TCGv_i64 fp0
= tcg_temp_new_i64();
9897 gen_load_fpr64(ctx
, fp0
, fs
);
9898 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
9899 gen_store_fpr64(ctx
, fp0
, fd
);
9900 tcg_temp_free_i64(fp0
);
9904 check_cp1_64bitmode(ctx
);
9906 TCGv_i32 fp0
= tcg_temp_new_i32();
9908 gen_load_fpr32(ctx
, fp0
, fs
);
9909 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
9910 gen_store_fpr32(ctx
, fp0
, fd
);
9911 tcg_temp_free_i32(fp0
);
9917 TCGv_i32 fp0
= tcg_temp_new_i32();
9918 TCGv_i32 fp1
= tcg_temp_new_i32();
9920 gen_load_fpr32(ctx
, fp0
, fs
);
9921 gen_load_fpr32(ctx
, fp1
, ft
);
9922 gen_store_fpr32h(ctx
, fp0
, fd
);
9923 gen_store_fpr32(ctx
, fp1
, fd
);
9924 tcg_temp_free_i32(fp0
);
9925 tcg_temp_free_i32(fp1
);
9931 TCGv_i32 fp0
= tcg_temp_new_i32();
9932 TCGv_i32 fp1
= tcg_temp_new_i32();
9934 gen_load_fpr32(ctx
, fp0
, fs
);
9935 gen_load_fpr32h(ctx
, fp1
, ft
);
9936 gen_store_fpr32(ctx
, fp1
, fd
);
9937 gen_store_fpr32h(ctx
, fp0
, fd
);
9938 tcg_temp_free_i32(fp0
);
9939 tcg_temp_free_i32(fp1
);
9945 TCGv_i32 fp0
= tcg_temp_new_i32();
9946 TCGv_i32 fp1
= tcg_temp_new_i32();
9948 gen_load_fpr32h(ctx
, fp0
, fs
);
9949 gen_load_fpr32(ctx
, fp1
, ft
);
9950 gen_store_fpr32(ctx
, fp1
, fd
);
9951 gen_store_fpr32h(ctx
, fp0
, fd
);
9952 tcg_temp_free_i32(fp0
);
9953 tcg_temp_free_i32(fp1
);
9959 TCGv_i32 fp0
= tcg_temp_new_i32();
9960 TCGv_i32 fp1
= tcg_temp_new_i32();
9962 gen_load_fpr32h(ctx
, fp0
, fs
);
9963 gen_load_fpr32h(ctx
, fp1
, ft
);
9964 gen_store_fpr32(ctx
, fp1
, fd
);
9965 gen_store_fpr32h(ctx
, fp0
, fd
);
9966 tcg_temp_free_i32(fp0
);
9967 tcg_temp_free_i32(fp1
);
9973 case OPC_CMP_UEQ_PS
:
9974 case OPC_CMP_OLT_PS
:
9975 case OPC_CMP_ULT_PS
:
9976 case OPC_CMP_OLE_PS
:
9977 case OPC_CMP_ULE_PS
:
9979 case OPC_CMP_NGLE_PS
:
9980 case OPC_CMP_SEQ_PS
:
9981 case OPC_CMP_NGL_PS
:
9983 case OPC_CMP_NGE_PS
:
9985 case OPC_CMP_NGT_PS
:
9986 if (ctx
->opcode
& (1 << 6)) {
9987 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
9989 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
9993 MIPS_INVAL("farith");
9994 generate_exception_end(ctx
, EXCP_RI
);
9999 /* Coprocessor 3 (FPU) */
/*
 * gen_flt3_ldst: emit TCG code for the FPU indexed (register + register
 * addressing) load/store group — presumably LWXC1/LDXC1/LUXC1 and
 * SWXC1/SDXC1/SUXC1; the `case` labels, `switch` skeleton, braces and
 * the final tcg_temp_free(t0) were dropped by this extraction, so only
 * the surviving lines are annotated below and opcode names are marked
 * as presumptions.
 *
 * fd selects the FPR loaded into, fs the FPR stored from; base and
 * index are the two GPRs whose sum forms the effective address.
 */
10000 static void gen_flt3_ldst (DisasContext
*ctx
, uint32_t opc
,
10001 int fd
, int fs
, int base
, int index
)
10003 TCGv t0
= tcg_temp_new();
/* Effective address computation: when either GPR is register 0 (the
   always-zero register) just read the other operand; otherwise use the
   target-width address add of the two GPR values.  (The leading
   `if (base == 0) {` line is among those dropped by the extraction.) */
10006 gen_load_gpr(t0
, index
);
10007 } else if (index
== 0) {
10008 gen_load_gpr(t0
, base
);
10010 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
10012 /* Don't do NOP if destination is zero: we must perform the actual
/* 32-bit indexed load: sign-extended word load (MO_TESL) into t0,
   truncated to i32 and written to FPR[fd]. */
10018 TCGv_i32 fp0
= tcg_temp_new_i32();
10020 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
10021 tcg_gen_trunc_tl_i32(fp0
, t0
);
10022 gen_store_fpr32(ctx
, fp0
, fd
);
10023 tcg_temp_free_i32(fp0
);
/* 64-bit indexed load into FPR[fd].  check_cp1_registers() presumably
   validates fd against the current FPU register mode — confirm against
   its definition elsewhere in this file. */
10028 check_cp1_registers(ctx
, fd
);
10030 TCGv_i64 fp0
= tcg_temp_new_i64();
10031 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10032 gen_store_fpr64(ctx
, fp0
, fd
);
10033 tcg_temp_free_i64(fp0
);
/* 64-bit "unaligned" indexed load: requires 64-bit FPU mode and forces
   8-byte alignment by clearing the low three address bits before the
   load. */
10037 check_cp1_64bitmode(ctx
);
10038 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10040 TCGv_i64 fp0
= tcg_temp_new_i64();
10042 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10043 gen_store_fpr64(ctx
, fp0
, fd
);
10044 tcg_temp_free_i64(fp0
);
/* 32-bit indexed store of FPR[fs] (MO_TEUL: target-endian 32-bit). */
10050 TCGv_i32 fp0
= tcg_temp_new_i32();
10051 gen_load_fpr32(ctx
, fp0
, fs
);
10052 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
10053 tcg_temp_free_i32(fp0
);
/* 64-bit indexed store of FPR[fs], with the same register-spec check
   as the 64-bit load above but applied to the source register. */
10058 check_cp1_registers(ctx
, fs
);
10060 TCGv_i64 fp0
= tcg_temp_new_i64();
10061 gen_load_fpr64(ctx
, fp0
, fs
);
10062 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10063 tcg_temp_free_i64(fp0
);
/* 64-bit "unaligned" indexed store: same forced 8-byte alignment as
   the corresponding load. */
10067 check_cp1_64bitmode(ctx
);
10068 tcg_gen_andi_tl(t0
, t0
, ~0x7);
10070 TCGv_i64 fp0
= tcg_temp_new_i64();
10071 gen_load_fpr64(ctx
, fp0
, fs
);
10072 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
10073 tcg_temp_free_i64(fp0
);
);
10080 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10081 int fd
, int fr
, int fs
, int ft
)
10087 TCGv t0
= tcg_temp_local_new();
10088 TCGv_i32 fp
= tcg_temp_new_i32();
10089 TCGv_i32 fph
= tcg_temp_new_i32();
10090 TCGLabel
*l1
= gen_new_label();
10091 TCGLabel
*l2
= gen_new_label();
10093 gen_load_gpr(t0
, fr
);
10094 tcg_gen_andi_tl(t0
, t0
, 0x7);
10096 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10097 gen_load_fpr32(ctx
, fp
, fs
);
10098 gen_load_fpr32h(ctx
, fph
, fs
);
10099 gen_store_fpr32(ctx
, fp
, fd
);
10100 gen_store_fpr32h(ctx
, fph
, fd
);
10103 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10105 #ifdef TARGET_WORDS_BIGENDIAN
10106 gen_load_fpr32(ctx
, fp
, fs
);
10107 gen_load_fpr32h(ctx
, fph
, ft
);
10108 gen_store_fpr32h(ctx
, fp
, fd
);
10109 gen_store_fpr32(ctx
, fph
, fd
);
10111 gen_load_fpr32h(ctx
, fph
, fs
);
10112 gen_load_fpr32(ctx
, fp
, ft
);
10113 gen_store_fpr32(ctx
, fph
, fd
);
10114 gen_store_fpr32h(ctx
, fp
, fd
);
10117 tcg_temp_free_i32(fp
);
10118 tcg_temp_free_i32(fph
);
10124 TCGv_i32 fp0
= tcg_temp_new_i32();
10125 TCGv_i32 fp1
= tcg_temp_new_i32();
10126 TCGv_i32 fp2
= tcg_temp_new_i32();
10128 gen_load_fpr32(ctx
, fp0
, fs
);
10129 gen_load_fpr32(ctx
, fp1
, ft
);
10130 gen_load_fpr32(ctx
, fp2
, fr
);
10131 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10132 tcg_temp_free_i32(fp0
);
10133 tcg_temp_free_i32(fp1
);
10134 gen_store_fpr32(ctx
, fp2
, fd
);
10135 tcg_temp_free_i32(fp2
);
10140 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10142 TCGv_i64 fp0
= tcg_temp_new_i64();
10143 TCGv_i64 fp1
= tcg_temp_new_i64();
10144 TCGv_i64 fp2
= tcg_temp_new_i64();
10146 gen_load_fpr64(ctx
, fp0
, fs
);
10147 gen_load_fpr64(ctx
, fp1
, ft
);
10148 gen_load_fpr64(ctx
, fp2
, fr
);
10149 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10150 tcg_temp_free_i64(fp0
);
10151 tcg_temp_free_i64(fp1
);
10152 gen_store_fpr64(ctx
, fp2
, fd
);
10153 tcg_temp_free_i64(fp2
);
10159 TCGv_i64 fp0
= tcg_temp_new_i64();
10160 TCGv_i64 fp1
= tcg_temp_new_i64();
10161 TCGv_i64 fp2
= tcg_temp_new_i64();
10163 gen_load_fpr64(ctx
, fp0
, fs
);
10164 gen_load_fpr64(ctx
, fp1
, ft
);
10165 gen_load_fpr64(ctx
, fp2
, fr
);
10166 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10167 tcg_temp_free_i64(fp0
);
10168 tcg_temp_free_i64(fp1
);
10169 gen_store_fpr64(ctx
, fp2
, fd
);
10170 tcg_temp_free_i64(fp2
);
10176 TCGv_i32 fp0
= tcg_temp_new_i32();
10177 TCGv_i32 fp1
= tcg_temp_new_i32();
10178 TCGv_i32 fp2
= tcg_temp_new_i32();
10180 gen_load_fpr32(ctx
, fp0
, fs
);
10181 gen_load_fpr32(ctx
, fp1
, ft
);
10182 gen_load_fpr32(ctx
, fp2
, fr
);
10183 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10184 tcg_temp_free_i32(fp0
);
10185 tcg_temp_free_i32(fp1
);
10186 gen_store_fpr32(ctx
, fp2
, fd
);
10187 tcg_temp_free_i32(fp2
);
10192 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10194 TCGv_i64 fp0
= tcg_temp_new_i64();
10195 TCGv_i64 fp1
= tcg_temp_new_i64();
10196 TCGv_i64 fp2
= tcg_temp_new_i64();
10198 gen_load_fpr64(ctx
, fp0
, fs
);
10199 gen_load_fpr64(ctx
, fp1
, ft
);
10200 gen_load_fpr64(ctx
, fp2
, fr
);
10201 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10202 tcg_temp_free_i64(fp0
);
10203 tcg_temp_free_i64(fp1
);
10204 gen_store_fpr64(ctx
, fp2
, fd
);
10205 tcg_temp_free_i64(fp2
);
10211 TCGv_i64 fp0
= tcg_temp_new_i64();
10212 TCGv_i64 fp1
= tcg_temp_new_i64();
10213 TCGv_i64 fp2
= tcg_temp_new_i64();
10215 gen_load_fpr64(ctx
, fp0
, fs
);
10216 gen_load_fpr64(ctx
, fp1
, ft
);
10217 gen_load_fpr64(ctx
, fp2
, fr
);
10218 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10219 tcg_temp_free_i64(fp0
);
10220 tcg_temp_free_i64(fp1
);
10221 gen_store_fpr64(ctx
, fp2
, fd
);
10222 tcg_temp_free_i64(fp2
);
10228 TCGv_i32 fp0
= tcg_temp_new_i32();
10229 TCGv_i32 fp1
= tcg_temp_new_i32();
10230 TCGv_i32 fp2
= tcg_temp_new_i32();
10232 gen_load_fpr32(ctx
, fp0
, fs
);
10233 gen_load_fpr32(ctx
, fp1
, ft
);
10234 gen_load_fpr32(ctx
, fp2
, fr
);
10235 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10236 tcg_temp_free_i32(fp0
);
10237 tcg_temp_free_i32(fp1
);
10238 gen_store_fpr32(ctx
, fp2
, fd
);
10239 tcg_temp_free_i32(fp2
);
10244 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10246 TCGv_i64 fp0
= tcg_temp_new_i64();
10247 TCGv_i64 fp1
= tcg_temp_new_i64();
10248 TCGv_i64 fp2
= tcg_temp_new_i64();
10250 gen_load_fpr64(ctx
, fp0
, fs
);
10251 gen_load_fpr64(ctx
, fp1
, ft
);
10252 gen_load_fpr64(ctx
, fp2
, fr
);
10253 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10254 tcg_temp_free_i64(fp0
);
10255 tcg_temp_free_i64(fp1
);
10256 gen_store_fpr64(ctx
, fp2
, fd
);
10257 tcg_temp_free_i64(fp2
);
10263 TCGv_i64 fp0
= tcg_temp_new_i64();
10264 TCGv_i64 fp1
= tcg_temp_new_i64();
10265 TCGv_i64 fp2
= tcg_temp_new_i64();
10267 gen_load_fpr64(ctx
, fp0
, fs
);
10268 gen_load_fpr64(ctx
, fp1
, ft
);
10269 gen_load_fpr64(ctx
, fp2
, fr
);
10270 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10271 tcg_temp_free_i64(fp0
);
10272 tcg_temp_free_i64(fp1
);
10273 gen_store_fpr64(ctx
, fp2
, fd
);
10274 tcg_temp_free_i64(fp2
);
10280 TCGv_i32 fp0
= tcg_temp_new_i32();
10281 TCGv_i32 fp1
= tcg_temp_new_i32();
10282 TCGv_i32 fp2
= tcg_temp_new_i32();
10284 gen_load_fpr32(ctx
, fp0
, fs
);
10285 gen_load_fpr32(ctx
, fp1
, ft
);
10286 gen_load_fpr32(ctx
, fp2
, fr
);
10287 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10288 tcg_temp_free_i32(fp0
);
10289 tcg_temp_free_i32(fp1
);
10290 gen_store_fpr32(ctx
, fp2
, fd
);
10291 tcg_temp_free_i32(fp2
);
10296 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10298 TCGv_i64 fp0
= tcg_temp_new_i64();
10299 TCGv_i64 fp1
= tcg_temp_new_i64();
10300 TCGv_i64 fp2
= tcg_temp_new_i64();
10302 gen_load_fpr64(ctx
, fp0
, fs
);
10303 gen_load_fpr64(ctx
, fp1
, ft
);
10304 gen_load_fpr64(ctx
, fp2
, fr
);
10305 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10306 tcg_temp_free_i64(fp0
);
10307 tcg_temp_free_i64(fp1
);
10308 gen_store_fpr64(ctx
, fp2
, fd
);
10309 tcg_temp_free_i64(fp2
);
10315 TCGv_i64 fp0
= tcg_temp_new_i64();
10316 TCGv_i64 fp1
= tcg_temp_new_i64();
10317 TCGv_i64 fp2
= tcg_temp_new_i64();
10319 gen_load_fpr64(ctx
, fp0
, fs
);
10320 gen_load_fpr64(ctx
, fp1
, ft
);
10321 gen_load_fpr64(ctx
, fp2
, fr
);
10322 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10323 tcg_temp_free_i64(fp0
);
10324 tcg_temp_free_i64(fp1
);
10325 gen_store_fpr64(ctx
, fp2
, fd
);
10326 tcg_temp_free_i64(fp2
);
10330 MIPS_INVAL("flt3_arith");
10331 generate_exception_end(ctx
, EXCP_RI
);
10336 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
10340 #if !defined(CONFIG_USER_ONLY)
10341 /* The Linux kernel will emulate rdhwr if it's not supported natively.
10342 Therefore only check the ISA in system mode. */
10343 check_insn(ctx
, ISA_MIPS32R2
);
10345 t0
= tcg_temp_new();
10349 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10350 gen_store_gpr(t0
, rt
);
10353 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10354 gen_store_gpr(t0
, rt
);
10357 gen_helper_rdhwr_cc(t0
, cpu_env
);
10358 gen_store_gpr(t0
, rt
);
10361 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10362 gen_store_gpr(t0
, rt
);
10365 check_insn(ctx
, ISA_MIPS32R6
);
10367 /* Performance counter registers are not implemented other than
10368 * control register 0.
10370 generate_exception(ctx
, EXCP_RI
);
10372 gen_helper_rdhwr_performance(t0
, cpu_env
);
10373 gen_store_gpr(t0
, rt
);
10376 check_insn(ctx
, ISA_MIPS32R6
);
10377 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10378 gen_store_gpr(t0
, rt
);
10381 #if defined(CONFIG_USER_ONLY)
10382 tcg_gen_ld_tl(t0
, cpu_env
,
10383 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10384 gen_store_gpr(t0
, rt
);
10387 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10388 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10389 tcg_gen_ld_tl(t0
, cpu_env
,
10390 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10391 gen_store_gpr(t0
, rt
);
10393 generate_exception_end(ctx
, EXCP_RI
);
10397 default: /* Invalid */
10398 MIPS_INVAL("rdhwr");
10399 generate_exception_end(ctx
, EXCP_RI
);
10405 static inline void clear_branch_hflags(DisasContext
*ctx
)
10407 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
10408 if (ctx
->bstate
== BS_NONE
) {
10409 save_cpu_state(ctx
, 0);
10411 /* it is not safe to save ctx->hflags as hflags may be changed
10412 in execution time by the instruction in delay / forbidden slot. */
10413 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
10417 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
10419 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10420 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
10421 /* Branches completion */
10422 clear_branch_hflags(ctx
);
10423 ctx
->bstate
= BS_BRANCH
;
10424 /* FIXME: Need to clear can_do_io. */
10425 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
10426 case MIPS_HFLAG_FBNSLOT
:
10427 gen_goto_tb(ctx
, 0, ctx
->pc
+ insn_bytes
);
10430 /* unconditional branch */
10431 if (proc_hflags
& MIPS_HFLAG_BX
) {
10432 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10434 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10436 case MIPS_HFLAG_BL
:
10437 /* blikely taken case */
10438 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10440 case MIPS_HFLAG_BC
:
10441 /* Conditional branch */
10443 TCGLabel
*l1
= gen_new_label();
10445 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10446 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
10448 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10451 case MIPS_HFLAG_BR
:
10452 /* unconditional branch to register */
10453 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10454 TCGv t0
= tcg_temp_new();
10455 TCGv_i32 t1
= tcg_temp_new_i32();
10457 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10458 tcg_gen_trunc_tl_i32(t1
, t0
);
10460 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10461 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10462 tcg_gen_or_i32(hflags
, hflags
, t1
);
10463 tcg_temp_free_i32(t1
);
10465 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10467 tcg_gen_mov_tl(cpu_PC
, btarget
);
10469 if (ctx
->singlestep_enabled
) {
10470 save_cpu_state(ctx
, 0);
10471 gen_helper_raise_exception_debug(cpu_env
);
10473 tcg_gen_exit_tb(0);
10476 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
10482 /* Compact Branches */
10483 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
10484 int rs
, int rt
, int32_t offset
)
10486 int bcond_compute
= 0;
10487 TCGv t0
= tcg_temp_new();
10488 TCGv t1
= tcg_temp_new();
10489 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
10491 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10492 #ifdef MIPS_DEBUG_DISAS
10493 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10496 generate_exception_end(ctx
, EXCP_RI
);
10500 /* Load needed operands and calculate btarget */
10502 /* compact branch */
10503 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10504 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10505 gen_load_gpr(t0
, rs
);
10506 gen_load_gpr(t1
, rt
);
10508 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10509 if (rs
<= rt
&& rs
== 0) {
10510 /* OPC_BEQZALC, OPC_BNEZALC */
10511 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10514 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10515 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10516 gen_load_gpr(t0
, rs
);
10517 gen_load_gpr(t1
, rt
);
10519 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10521 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10522 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10523 if (rs
== 0 || rs
== rt
) {
10524 /* OPC_BLEZALC, OPC_BGEZALC */
10525 /* OPC_BGTZALC, OPC_BLTZALC */
10526 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10528 gen_load_gpr(t0
, rs
);
10529 gen_load_gpr(t1
, rt
);
10531 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10535 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10540 /* OPC_BEQZC, OPC_BNEZC */
10541 gen_load_gpr(t0
, rs
);
10543 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10545 /* OPC_JIC, OPC_JIALC */
10546 TCGv tbase
= tcg_temp_new();
10547 TCGv toffset
= tcg_temp_new();
10549 gen_load_gpr(tbase
, rt
);
10550 tcg_gen_movi_tl(toffset
, offset
);
10551 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10552 tcg_temp_free(tbase
);
10553 tcg_temp_free(toffset
);
10557 MIPS_INVAL("Compact branch/jump");
10558 generate_exception_end(ctx
, EXCP_RI
);
10562 if (bcond_compute
== 0) {
10563 /* Uncoditional compact branch */
10566 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10569 ctx
->hflags
|= MIPS_HFLAG_BR
;
10572 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10575 ctx
->hflags
|= MIPS_HFLAG_B
;
10578 MIPS_INVAL("Compact branch/jump");
10579 generate_exception_end(ctx
, EXCP_RI
);
10583 /* Generating branch here as compact branches don't have delay slot */
10584 gen_branch(ctx
, 4);
10586 /* Conditional compact branch */
10587 TCGLabel
*fs
= gen_new_label();
10588 save_cpu_state(ctx
, 0);
10591 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10592 if (rs
== 0 && rt
!= 0) {
10594 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10595 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10597 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10600 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10603 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10604 if (rs
== 0 && rt
!= 0) {
10606 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10607 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10609 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10612 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10615 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10616 if (rs
== 0 && rt
!= 0) {
10618 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10619 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10621 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10624 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10627 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10628 if (rs
== 0 && rt
!= 0) {
10630 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10631 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10633 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10636 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
10639 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10640 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10642 /* OPC_BOVC, OPC_BNVC */
10643 TCGv t2
= tcg_temp_new();
10644 TCGv t3
= tcg_temp_new();
10645 TCGv t4
= tcg_temp_new();
10646 TCGv input_overflow
= tcg_temp_new();
10648 gen_load_gpr(t0
, rs
);
10649 gen_load_gpr(t1
, rt
);
10650 tcg_gen_ext32s_tl(t2
, t0
);
10651 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
10652 tcg_gen_ext32s_tl(t3
, t1
);
10653 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
10654 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
10656 tcg_gen_add_tl(t4
, t2
, t3
);
10657 tcg_gen_ext32s_tl(t4
, t4
);
10658 tcg_gen_xor_tl(t2
, t2
, t3
);
10659 tcg_gen_xor_tl(t3
, t4
, t3
);
10660 tcg_gen_andc_tl(t2
, t3
, t2
);
10661 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
10662 tcg_gen_or_tl(t4
, t4
, input_overflow
);
10663 if (opc
== OPC_BOVC
) {
10665 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
10668 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
10670 tcg_temp_free(input_overflow
);
10674 } else if (rs
< rt
&& rs
== 0) {
10675 /* OPC_BEQZALC, OPC_BNEZALC */
10676 if (opc
== OPC_BEQZALC
) {
10678 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10681 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10684 /* OPC_BEQC, OPC_BNEC */
10685 if (opc
== OPC_BEQC
) {
10687 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10690 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10695 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10698 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10701 MIPS_INVAL("Compact conditional branch/jump");
10702 generate_exception_end(ctx
, EXCP_RI
);
10706 /* Generating branch here as compact branches don't have delay slot */
10707 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10710 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
10718 /* ISA extensions (ASEs) */
10719 /* MIPS16 extension to MIPS32 */
10721 /* MIPS16 major opcodes */
10723 M16_OPC_ADDIUSP
= 0x00,
10724 M16_OPC_ADDIUPC
= 0x01,
10726 M16_OPC_JAL
= 0x03,
10727 M16_OPC_BEQZ
= 0x04,
10728 M16_OPC_BNEQZ
= 0x05,
10729 M16_OPC_SHIFT
= 0x06,
10731 M16_OPC_RRIA
= 0x08,
10732 M16_OPC_ADDIU8
= 0x09,
10733 M16_OPC_SLTI
= 0x0a,
10734 M16_OPC_SLTIU
= 0x0b,
10737 M16_OPC_CMPI
= 0x0e,
10741 M16_OPC_LWSP
= 0x12,
10743 M16_OPC_LBU
= 0x14,
10744 M16_OPC_LHU
= 0x15,
10745 M16_OPC_LWPC
= 0x16,
10746 M16_OPC_LWU
= 0x17,
10749 M16_OPC_SWSP
= 0x1a,
10751 M16_OPC_RRR
= 0x1c,
10753 M16_OPC_EXTEND
= 0x1e,
10757 /* I8 funct field */
10776 /* RR funct field */
10810 /* I64 funct field */
10818 I64_DADDIUPC
= 0x6,
10822 /* RR ry field for CNVT */
10824 RR_RY_CNVT_ZEB
= 0x0,
10825 RR_RY_CNVT_ZEH
= 0x1,
10826 RR_RY_CNVT_ZEW
= 0x2,
10827 RR_RY_CNVT_SEB
= 0x4,
10828 RR_RY_CNVT_SEH
= 0x5,
10829 RR_RY_CNVT_SEW
= 0x6,
10832 static int xlat (int r
)
10834 static int map
[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
10839 static void gen_mips16_save (DisasContext
*ctx
,
10840 int xsregs
, int aregs
,
10841 int do_ra
, int do_s0
, int do_s1
,
10844 TCGv t0
= tcg_temp_new();
10845 TCGv t1
= tcg_temp_new();
10846 TCGv t2
= tcg_temp_new();
10876 generate_exception_end(ctx
, EXCP_RI
);
10882 gen_base_offset_addr(ctx
, t0
, 29, 12);
10883 gen_load_gpr(t1
, 7);
10884 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10887 gen_base_offset_addr(ctx
, t0
, 29, 8);
10888 gen_load_gpr(t1
, 6);
10889 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10892 gen_base_offset_addr(ctx
, t0
, 29, 4);
10893 gen_load_gpr(t1
, 5);
10894 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10897 gen_base_offset_addr(ctx
, t0
, 29, 0);
10898 gen_load_gpr(t1
, 4);
10899 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
10902 gen_load_gpr(t0
, 29);
10904 #define DECR_AND_STORE(reg) do { \
10905 tcg_gen_movi_tl(t2, -4); \
10906 gen_op_addr_add(ctx, t0, t0, t2); \
10907 gen_load_gpr(t1, reg); \
10908 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
10912 DECR_AND_STORE(31);
10917 DECR_AND_STORE(30);
10920 DECR_AND_STORE(23);
10923 DECR_AND_STORE(22);
10926 DECR_AND_STORE(21);
10929 DECR_AND_STORE(20);
10932 DECR_AND_STORE(19);
10935 DECR_AND_STORE(18);
10939 DECR_AND_STORE(17);
10942 DECR_AND_STORE(16);
10972 generate_exception_end(ctx
, EXCP_RI
);
10988 #undef DECR_AND_STORE
10990 tcg_gen_movi_tl(t2
, -framesize
);
10991 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
10997 static void gen_mips16_restore (DisasContext
*ctx
,
10998 int xsregs
, int aregs
,
10999 int do_ra
, int do_s0
, int do_s1
,
11003 TCGv t0
= tcg_temp_new();
11004 TCGv t1
= tcg_temp_new();
11005 TCGv t2
= tcg_temp_new();
11007 tcg_gen_movi_tl(t2
, framesize
);
11008 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
11010 #define DECR_AND_LOAD(reg) do { \
11011 tcg_gen_movi_tl(t2, -4); \
11012 gen_op_addr_add(ctx, t0, t0, t2); \
11013 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11014 gen_store_gpr(t1, reg); \
11078 generate_exception_end(ctx
, EXCP_RI
);
11094 #undef DECR_AND_LOAD
11096 tcg_gen_movi_tl(t2
, framesize
);
11097 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11103 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11104 int is_64_bit
, int extended
)
11108 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11109 generate_exception_end(ctx
, EXCP_RI
);
11113 t0
= tcg_temp_new();
11115 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11116 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11118 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11124 #if defined(TARGET_MIPS64)
11125 static void decode_i64_mips16 (DisasContext
*ctx
,
11126 int ry
, int funct
, int16_t offset
,
11131 check_insn(ctx
, ISA_MIPS3
);
11132 check_mips_64(ctx
);
11133 offset
= extended
? offset
: offset
<< 3;
11134 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11137 check_insn(ctx
, ISA_MIPS3
);
11138 check_mips_64(ctx
);
11139 offset
= extended
? offset
: offset
<< 3;
11140 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11143 check_insn(ctx
, ISA_MIPS3
);
11144 check_mips_64(ctx
);
11145 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11146 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11149 check_insn(ctx
, ISA_MIPS3
);
11150 check_mips_64(ctx
);
11151 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11152 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11155 check_insn(ctx
, ISA_MIPS3
);
11156 check_mips_64(ctx
);
11157 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11158 generate_exception_end(ctx
, EXCP_RI
);
11160 offset
= extended
? offset
: offset
<< 3;
11161 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11165 check_insn(ctx
, ISA_MIPS3
);
11166 check_mips_64(ctx
);
11167 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11168 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11171 check_insn(ctx
, ISA_MIPS3
);
11172 check_mips_64(ctx
);
11173 offset
= extended
? offset
: offset
<< 2;
11174 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11177 check_insn(ctx
, ISA_MIPS3
);
11178 check_mips_64(ctx
);
11179 offset
= extended
? offset
: offset
<< 2;
11180 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11186 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11188 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11189 int op
, rx
, ry
, funct
, sa
;
11190 int16_t imm
, offset
;
11192 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11193 op
= (ctx
->opcode
>> 11) & 0x1f;
11194 sa
= (ctx
->opcode
>> 22) & 0x1f;
11195 funct
= (ctx
->opcode
>> 8) & 0x7;
11196 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11197 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11198 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11199 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11200 | (ctx
->opcode
& 0x1f));
11202 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11205 case M16_OPC_ADDIUSP
:
11206 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11208 case M16_OPC_ADDIUPC
:
11209 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11212 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11213 /* No delay slot, so just process as a normal instruction */
11216 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11217 /* No delay slot, so just process as a normal instruction */
11219 case M16_OPC_BNEQZ
:
11220 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11221 /* No delay slot, so just process as a normal instruction */
11223 case M16_OPC_SHIFT
:
11224 switch (ctx
->opcode
& 0x3) {
11226 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11229 #if defined(TARGET_MIPS64)
11230 check_mips_64(ctx
);
11231 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11233 generate_exception_end(ctx
, EXCP_RI
);
11237 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11240 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11244 #if defined(TARGET_MIPS64)
11246 check_insn(ctx
, ISA_MIPS3
);
11247 check_mips_64(ctx
);
11248 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11252 imm
= ctx
->opcode
& 0xf;
11253 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11254 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11255 imm
= (int16_t) (imm
<< 1) >> 1;
11256 if ((ctx
->opcode
>> 4) & 0x1) {
11257 #if defined(TARGET_MIPS64)
11258 check_mips_64(ctx
);
11259 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11261 generate_exception_end(ctx
, EXCP_RI
);
11264 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11267 case M16_OPC_ADDIU8
:
11268 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11271 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11273 case M16_OPC_SLTIU
:
11274 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11279 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11282 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11285 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11288 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11291 check_insn(ctx
, ISA_MIPS32
);
11293 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11294 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11295 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11296 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11297 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11298 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11299 | (ctx
->opcode
& 0xf)) << 3;
11301 if (ctx
->opcode
& (1 << 7)) {
11302 gen_mips16_save(ctx
, xsregs
, aregs
,
11303 do_ra
, do_s0
, do_s1
,
11306 gen_mips16_restore(ctx
, xsregs
, aregs
,
11307 do_ra
, do_s0
, do_s1
,
11313 generate_exception_end(ctx
, EXCP_RI
);
11318 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11321 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11323 #if defined(TARGET_MIPS64)
11325 check_insn(ctx
, ISA_MIPS3
);
11326 check_mips_64(ctx
);
11327 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11331 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11334 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11337 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11340 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11343 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11346 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11349 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11351 #if defined(TARGET_MIPS64)
11353 check_insn(ctx
, ISA_MIPS3
);
11354 check_mips_64(ctx
);
11355 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11359 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11362 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11365 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11368 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11370 #if defined(TARGET_MIPS64)
11372 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11376 generate_exception_end(ctx
, EXCP_RI
);
11383 static inline bool is_uhi(int sdbbp_code
)
11385 #ifdef CONFIG_USER_ONLY
11388 return semihosting_enabled() && sdbbp_code
== 1;
11392 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11396 int op
, cnvt_op
, op1
, offset
;
11400 op
= (ctx
->opcode
>> 11) & 0x1f;
11401 sa
= (ctx
->opcode
>> 2) & 0x7;
11402 sa
= sa
== 0 ? 8 : sa
;
11403 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11404 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11405 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11406 op1
= offset
= ctx
->opcode
& 0x1f;
11411 case M16_OPC_ADDIUSP
:
11413 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11415 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11418 case M16_OPC_ADDIUPC
:
11419 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11422 offset
= (ctx
->opcode
& 0x7ff) << 1;
11423 offset
= (int16_t)(offset
<< 4) >> 4;
11424 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11425 /* No delay slot, so just process as a normal instruction */
11428 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11429 offset
= (((ctx
->opcode
& 0x1f) << 21)
11430 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11432 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11433 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11437 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11438 ((int8_t)ctx
->opcode
) << 1, 0);
11439 /* No delay slot, so just process as a normal instruction */
11441 case M16_OPC_BNEQZ
:
11442 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11443 ((int8_t)ctx
->opcode
) << 1, 0);
11444 /* No delay slot, so just process as a normal instruction */
11446 case M16_OPC_SHIFT
:
11447 switch (ctx
->opcode
& 0x3) {
11449 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11452 #if defined(TARGET_MIPS64)
11453 check_insn(ctx
, ISA_MIPS3
);
11454 check_mips_64(ctx
);
11455 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11457 generate_exception_end(ctx
, EXCP_RI
);
11461 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11464 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11468 #if defined(TARGET_MIPS64)
11470 check_insn(ctx
, ISA_MIPS3
);
11471 check_mips_64(ctx
);
11472 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11477 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11479 if ((ctx
->opcode
>> 4) & 1) {
11480 #if defined(TARGET_MIPS64)
11481 check_insn(ctx
, ISA_MIPS3
);
11482 check_mips_64(ctx
);
11483 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11485 generate_exception_end(ctx
, EXCP_RI
);
11488 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11492 case M16_OPC_ADDIU8
:
11494 int16_t imm
= (int8_t) ctx
->opcode
;
11496 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11501 int16_t imm
= (uint8_t) ctx
->opcode
;
11502 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11505 case M16_OPC_SLTIU
:
11507 int16_t imm
= (uint8_t) ctx
->opcode
;
11508 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11515 funct
= (ctx
->opcode
>> 8) & 0x7;
11518 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11519 ((int8_t)ctx
->opcode
) << 1, 0);
11522 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11523 ((int8_t)ctx
->opcode
) << 1, 0);
11526 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11529 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11530 ((int8_t)ctx
->opcode
) << 3);
11533 check_insn(ctx
, ISA_MIPS32
);
11535 int do_ra
= ctx
->opcode
& (1 << 6);
11536 int do_s0
= ctx
->opcode
& (1 << 5);
11537 int do_s1
= ctx
->opcode
& (1 << 4);
11538 int framesize
= ctx
->opcode
& 0xf;
11540 if (framesize
== 0) {
11543 framesize
= framesize
<< 3;
11546 if (ctx
->opcode
& (1 << 7)) {
11547 gen_mips16_save(ctx
, 0, 0,
11548 do_ra
, do_s0
, do_s1
, framesize
);
11550 gen_mips16_restore(ctx
, 0, 0,
11551 do_ra
, do_s0
, do_s1
, framesize
);
11557 int rz
= xlat(ctx
->opcode
& 0x7);
11559 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11560 ((ctx
->opcode
>> 5) & 0x7);
11561 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11565 reg32
= ctx
->opcode
& 0x1f;
11566 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11569 generate_exception_end(ctx
, EXCP_RI
);
11576 int16_t imm
= (uint8_t) ctx
->opcode
;
11578 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11583 int16_t imm
= (uint8_t) ctx
->opcode
;
11584 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11587 #if defined(TARGET_MIPS64)
11589 check_insn(ctx
, ISA_MIPS3
);
11590 check_mips_64(ctx
);
11591 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11595 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11598 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11601 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11604 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11607 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11610 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11613 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11615 #if defined (TARGET_MIPS64)
11617 check_insn(ctx
, ISA_MIPS3
);
11618 check_mips_64(ctx
);
11619 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11623 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11626 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11629 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11632 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11636 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11639 switch (ctx
->opcode
& 0x3) {
11641 mips32_op
= OPC_ADDU
;
11644 mips32_op
= OPC_SUBU
;
11646 #if defined(TARGET_MIPS64)
11648 mips32_op
= OPC_DADDU
;
11649 check_insn(ctx
, ISA_MIPS3
);
11650 check_mips_64(ctx
);
11653 mips32_op
= OPC_DSUBU
;
11654 check_insn(ctx
, ISA_MIPS3
);
11655 check_mips_64(ctx
);
11659 generate_exception_end(ctx
, EXCP_RI
);
11663 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11672 int nd
= (ctx
->opcode
>> 7) & 0x1;
11673 int link
= (ctx
->opcode
>> 6) & 0x1;
11674 int ra
= (ctx
->opcode
>> 5) & 0x1;
11677 check_insn(ctx
, ISA_MIPS32
);
11686 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11691 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11692 gen_helper_do_semihosting(cpu_env
);
11694 /* XXX: not clear which exception should be raised
11695 * when in debug mode...
11697 check_insn(ctx
, ISA_MIPS32
);
11698 generate_exception_end(ctx
, EXCP_DBp
);
11702 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11705 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11708 generate_exception_end(ctx
, EXCP_BREAK
);
11711 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11714 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11717 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11719 #if defined (TARGET_MIPS64)
11721 check_insn(ctx
, ISA_MIPS3
);
11722 check_mips_64(ctx
);
11723 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11727 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11730 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11733 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11736 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11739 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
11742 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
11745 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
11748 check_insn(ctx
, ISA_MIPS32
);
11750 case RR_RY_CNVT_ZEB
:
11751 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11753 case RR_RY_CNVT_ZEH
:
11754 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11756 case RR_RY_CNVT_SEB
:
11757 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11759 case RR_RY_CNVT_SEH
:
11760 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11762 #if defined (TARGET_MIPS64)
11763 case RR_RY_CNVT_ZEW
:
11764 check_insn(ctx
, ISA_MIPS64
);
11765 check_mips_64(ctx
);
11766 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11768 case RR_RY_CNVT_SEW
:
11769 check_insn(ctx
, ISA_MIPS64
);
11770 check_mips_64(ctx
);
11771 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11775 generate_exception_end(ctx
, EXCP_RI
);
11780 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
11782 #if defined (TARGET_MIPS64)
11784 check_insn(ctx
, ISA_MIPS3
);
11785 check_mips_64(ctx
);
11786 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
11789 check_insn(ctx
, ISA_MIPS3
);
11790 check_mips_64(ctx
);
11791 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
11794 check_insn(ctx
, ISA_MIPS3
);
11795 check_mips_64(ctx
);
11796 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
11799 check_insn(ctx
, ISA_MIPS3
);
11800 check_mips_64(ctx
);
11801 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
11805 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
11808 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
11811 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
11814 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
11816 #if defined (TARGET_MIPS64)
11818 check_insn(ctx
, ISA_MIPS3
);
11819 check_mips_64(ctx
);
11820 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
11823 check_insn(ctx
, ISA_MIPS3
);
11824 check_mips_64(ctx
);
11825 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
11828 check_insn(ctx
, ISA_MIPS3
);
11829 check_mips_64(ctx
);
11830 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
11833 check_insn(ctx
, ISA_MIPS3
);
11834 check_mips_64(ctx
);
11835 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
11839 generate_exception_end(ctx
, EXCP_RI
);
11843 case M16_OPC_EXTEND
:
11844 decode_extended_mips16_opc(env
, ctx
);
11847 #if defined(TARGET_MIPS64)
11849 funct
= (ctx
->opcode
>> 8) & 0x7;
11850 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
11854 generate_exception_end(ctx
, EXCP_RI
);
11861 /* microMIPS extension to MIPS32/MIPS64 */
11864 * microMIPS32/microMIPS64 major opcodes
11866 * 1. MIPS Architecture for Programmers Volume II-B:
11867 * The microMIPS32 Instruction Set (Revision 3.05)
11869 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
11871 * 2. MIPS Architecture For Programmers Volume II-A:
11872 * The MIPS64 Instruction Set (Revision 3.51)
11902 POOL32S
= 0x16, /* MIPS64 */
11903 DADDIU32
= 0x17, /* MIPS64 */
11932 /* 0x29 is reserved */
11945 /* 0x31 is reserved */
11958 SD32
= 0x36, /* MIPS64 */
11959 LD32
= 0x37, /* MIPS64 */
11961 /* 0x39 is reserved */
11977 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
11987 /* POOL32A encoding of minor opcode field */
11990 /* These opcodes are distinguished only by bits 9..6; those bits are
11991 * what are recorded below. */
12028 /* The following can be distinguished by their lower 6 bits. */
12037 /* POOL32AXF encoding of minor opcode field extension */
12040 * 1. MIPS Architecture for Programmers Volume II-B:
12041 * The microMIPS32 Instruction Set (Revision 3.05)
12043 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12045 * 2. MIPS Architecture for Programmers VolumeIV-e:
12046 * The MIPS DSP Application-Specific Extension
12047 * to the microMIPS32 Architecture (Revision 2.34)
12049 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12064 /* begin of microMIPS32 DSP */
12066 /* bits 13..12 for 0x01 */
12072 /* bits 13..12 for 0x2a */
12078 /* bits 13..12 for 0x32 */
12082 /* end of microMIPS32 DSP */
12084 /* bits 15..12 for 0x2c */
12101 /* bits 15..12 for 0x34 */
12109 /* bits 15..12 for 0x3c */
12111 JR
= 0x0, /* alias */
12119 /* bits 15..12 for 0x05 */
12123 /* bits 15..12 for 0x0d */
12135 /* bits 15..12 for 0x15 */
12141 /* bits 15..12 for 0x1d */
12145 /* bits 15..12 for 0x2d */
12150 /* bits 15..12 for 0x35 */
12157 /* POOL32B encoding of minor opcode field (bits 15..12) */
12173 /* POOL32C encoding of minor opcode field (bits 15..12) */
12181 /* 0xa is reserved */
12188 /* 0x6 is reserved */
12194 /* POOL32F encoding of minor opcode field (bits 5..0) */
12197 /* These are the bit 7..6 values */
12206 /* These are the bit 8..6 values */
12231 MOVZ_FMT_05
= 0x05,
12265 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12272 /* POOL32Fxf encoding of minor opcode extension field */
12310 /* POOL32I encoding of minor opcode field (bits 25..21) */
12340 /* These overlap and are distinguished by bit16 of the instruction */
12349 /* POOL16A encoding of minor opcode field */
12356 /* POOL16B encoding of minor opcode field */
12363 /* POOL16C encoding of minor opcode field */
12383 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12403 /* POOL16D encoding of minor opcode field */
12410 /* POOL16E encoding of minor opcode field */
/* Map a 3-bit MIPS16/microMIPS register encoding to the real GPR number.
 * Encodings 0/1 select $16/$17 (s0/s1); 2..7 select $2..$7 (v0-v1, a0-a3).
 * NOTE(review): the extraction elided the body; reconstructed the canonical
 * table lookup, which the visible `map[]` initializer requires. */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12424 /* Used for 16-bit store instructions. */
/* Map a 3-bit register encoding used by 16-bit store instructions to the
 * real GPR number.  Differs from mmreg() only in encoding 0, which selects
 * $0 (zero) instead of $16, so stores can write the zero register pattern.
 * NOTE(review): body reconstructed — extraction elided the return/braces. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
12432 #define uMIPS_RD(op) ((op >> 7) & 0x7)
12433 #define uMIPS_RS(op) ((op >> 4) & 0x7)
12434 #define uMIPS_RS2(op) uMIPS_RS(op)
12435 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
12436 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
12437 #define uMIPS_RS5(op) (op & 0x1f)
12439 /* Signed immediate */
12440 #define SIMM(op, start, width) \
12441 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) \
12444 /* Zero-extended immediate */
12445 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
12447 static void gen_addiur1sp(DisasContext
*ctx
)
12449 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12451 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
12454 static void gen_addiur2(DisasContext
*ctx
)
12456 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
12457 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12458 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12460 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
12463 static void gen_addiusp(DisasContext
*ctx
)
12465 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
12468 if (encoded
<= 1) {
12469 decoded
= 256 + encoded
;
12470 } else if (encoded
<= 255) {
12472 } else if (encoded
<= 509) {
12473 decoded
= encoded
- 512;
12475 decoded
= encoded
- 768;
12478 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
12481 static void gen_addius5(DisasContext
*ctx
)
12483 int imm
= SIMM(ctx
->opcode
, 1, 4);
12484 int rd
= (ctx
->opcode
>> 5) & 0x1f;
12486 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
12489 static void gen_andi16(DisasContext
*ctx
)
12491 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
12492 31, 32, 63, 64, 255, 32768, 65535 };
12493 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
12494 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
12495 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
12497 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
12500 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12501 int base
, int16_t offset
)
12506 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12507 generate_exception_end(ctx
, EXCP_RI
);
12511 t0
= tcg_temp_new();
12513 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12515 t1
= tcg_const_tl(reglist
);
12516 t2
= tcg_const_i32(ctx
->mem_idx
);
12518 save_cpu_state(ctx
, 1);
12521 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12524 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12526 #ifdef TARGET_MIPS64
12528 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12531 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12537 tcg_temp_free_i32(t2
);
12541 static void gen_pool16c_insn(DisasContext
*ctx
)
12543 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12544 int rs
= mmreg(ctx
->opcode
& 0x7);
12546 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12551 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12557 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12563 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12569 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12576 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12577 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12579 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12588 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12589 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12591 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12598 int reg
= ctx
->opcode
& 0x1f;
12600 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12606 int reg
= ctx
->opcode
& 0x1f;
12607 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12608 /* Let normal delay slot handling in our caller take us
12609 to the branch target. */
12614 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12615 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12619 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12620 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12624 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12628 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12631 generate_exception_end(ctx
, EXCP_BREAK
);
12634 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12635 gen_helper_do_semihosting(cpu_env
);
12637 /* XXX: not clear which exception should be raised
12638 * when in debug mode...
12640 check_insn(ctx
, ISA_MIPS32
);
12641 generate_exception_end(ctx
, EXCP_DBp
);
12644 case JRADDIUSP
+ 0:
12645 case JRADDIUSP
+ 1:
12647 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12648 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12649 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12650 /* Let normal delay slot handling in our caller take us
12651 to the branch target. */
12655 generate_exception_end(ctx
, EXCP_RI
);
12660 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
12663 int rd
, rs
, re
, rt
;
12664 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12665 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12666 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12667 rd
= rd_enc
[enc_dest
];
12668 re
= re_enc
[enc_dest
];
12669 rs
= rs_rt_enc
[enc_rs
];
12670 rt
= rs_rt_enc
[enc_rt
];
12672 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
12674 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
12677 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
12679 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
12683 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12685 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12686 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12688 switch (ctx
->opcode
& 0xf) {
12690 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12693 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12697 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12698 int offset
= extract32(ctx
->opcode
, 4, 4);
12699 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12702 case R6_JRC16
: /* JRCADDIUSP */
12703 if ((ctx
->opcode
>> 4) & 1) {
12705 int imm
= extract32(ctx
->opcode
, 5, 5);
12706 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12707 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12710 int rs
= extract32(ctx
->opcode
, 5, 5);
12711 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12714 case MOVEP
... MOVEP_07
:
12715 case MOVEP_0C
... MOVEP_0F
:
12717 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12718 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12719 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12720 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12724 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12727 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12731 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12732 int offset
= extract32(ctx
->opcode
, 4, 4);
12733 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12736 case JALRC16
: /* BREAK16, SDBBP16 */
12737 switch (ctx
->opcode
& 0x3f) {
12739 case JALRC16
+ 0x20:
12741 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
12746 generate_exception(ctx
, EXCP_BREAK
);
12750 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
12751 gen_helper_do_semihosting(cpu_env
);
12753 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
12754 generate_exception(ctx
, EXCP_RI
);
12756 generate_exception(ctx
, EXCP_DBp
);
12763 generate_exception(ctx
, EXCP_RI
);
12768 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
12770 TCGv t0
= tcg_temp_new();
12771 TCGv t1
= tcg_temp_new();
12773 gen_load_gpr(t0
, base
);
12776 gen_load_gpr(t1
, index
);
12777 tcg_gen_shli_tl(t1
, t1
, 2);
12778 gen_op_addr_add(ctx
, t0
, t1
, t0
);
12781 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12782 gen_store_gpr(t1
, rd
);
12788 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
12789 int base
, int16_t offset
)
12793 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
12794 generate_exception_end(ctx
, EXCP_RI
);
12798 t0
= tcg_temp_new();
12799 t1
= tcg_temp_new();
12801 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12806 generate_exception_end(ctx
, EXCP_RI
);
12809 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12810 gen_store_gpr(t1
, rd
);
12811 tcg_gen_movi_tl(t1
, 4);
12812 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12813 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12814 gen_store_gpr(t1
, rd
+1);
12817 gen_load_gpr(t1
, rd
);
12818 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12819 tcg_gen_movi_tl(t1
, 4);
12820 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12821 gen_load_gpr(t1
, rd
+1);
12822 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
12824 #ifdef TARGET_MIPS64
12827 generate_exception_end(ctx
, EXCP_RI
);
12830 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12831 gen_store_gpr(t1
, rd
);
12832 tcg_gen_movi_tl(t1
, 8);
12833 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12834 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12835 gen_store_gpr(t1
, rd
+1);
12838 gen_load_gpr(t1
, rd
);
12839 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12840 tcg_gen_movi_tl(t1
, 8);
12841 gen_op_addr_add(ctx
, t0
, t0
, t1
);
12842 gen_load_gpr(t1
, rd
+1);
12843 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
12851 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
12853 int extension
= (ctx
->opcode
>> 6) & 0x3f;
12854 int minor
= (ctx
->opcode
>> 12) & 0xf;
12855 uint32_t mips32_op
;
12857 switch (extension
) {
12859 mips32_op
= OPC_TEQ
;
12862 mips32_op
= OPC_TGE
;
12865 mips32_op
= OPC_TGEU
;
12868 mips32_op
= OPC_TLT
;
12871 mips32_op
= OPC_TLTU
;
12874 mips32_op
= OPC_TNE
;
12876 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
12878 #ifndef CONFIG_USER_ONLY
12881 check_cp0_enabled(ctx
);
12883 /* Treat as NOP. */
12886 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
12890 check_cp0_enabled(ctx
);
12892 TCGv t0
= tcg_temp_new();
12894 gen_load_gpr(t0
, rt
);
12895 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
12901 switch (minor
& 3) {
12903 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12906 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12909 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12912 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12915 goto pool32axf_invalid
;
12919 switch (minor
& 3) {
12921 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12924 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
12927 goto pool32axf_invalid
;
12933 check_insn(ctx
, ISA_MIPS32R6
);
12934 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
12937 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
12940 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
12943 mips32_op
= OPC_CLO
;
12946 mips32_op
= OPC_CLZ
;
12948 check_insn(ctx
, ISA_MIPS32
);
12949 gen_cl(ctx
, mips32_op
, rt
, rs
);
12952 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12953 gen_rdhwr(ctx
, rt
, rs
, 0);
12956 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
12959 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12960 mips32_op
= OPC_MULT
;
12963 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12964 mips32_op
= OPC_MULTU
;
12967 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12968 mips32_op
= OPC_DIV
;
12971 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12972 mips32_op
= OPC_DIVU
;
12975 check_insn(ctx
, ISA_MIPS32
);
12976 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
12979 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12980 mips32_op
= OPC_MADD
;
12983 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12984 mips32_op
= OPC_MADDU
;
12987 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12988 mips32_op
= OPC_MSUB
;
12991 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
12992 mips32_op
= OPC_MSUBU
;
12994 check_insn(ctx
, ISA_MIPS32
);
12995 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
12998 goto pool32axf_invalid
;
13009 generate_exception_err(ctx
, EXCP_CpU
, 2);
13012 goto pool32axf_invalid
;
13017 case JALR
: /* JALRC */
13018 case JALR_HB
: /* JALRC_HB */
13019 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13020 /* JALRC, JALRC_HB */
13021 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13023 /* JALR, JALR_HB */
13024 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13025 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13030 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13031 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13032 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13035 goto pool32axf_invalid
;
13041 check_cp0_enabled(ctx
);
13042 check_insn(ctx
, ISA_MIPS32R2
);
13043 gen_load_srsgpr(rs
, rt
);
13046 check_cp0_enabled(ctx
);
13047 check_insn(ctx
, ISA_MIPS32R2
);
13048 gen_store_srsgpr(rs
, rt
);
13051 goto pool32axf_invalid
;
13054 #ifndef CONFIG_USER_ONLY
13058 mips32_op
= OPC_TLBP
;
13061 mips32_op
= OPC_TLBR
;
13064 mips32_op
= OPC_TLBWI
;
13067 mips32_op
= OPC_TLBWR
;
13070 mips32_op
= OPC_TLBINV
;
13073 mips32_op
= OPC_TLBINVF
;
13076 mips32_op
= OPC_WAIT
;
13079 mips32_op
= OPC_DERET
;
13082 mips32_op
= OPC_ERET
;
13084 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13087 goto pool32axf_invalid
;
13093 check_cp0_enabled(ctx
);
13095 TCGv t0
= tcg_temp_new();
13097 save_cpu_state(ctx
, 1);
13098 gen_helper_di(t0
, cpu_env
);
13099 gen_store_gpr(t0
, rs
);
13100 /* Stop translation as we may have switched the execution mode */
13101 ctx
->bstate
= BS_STOP
;
13106 check_cp0_enabled(ctx
);
13108 TCGv t0
= tcg_temp_new();
13110 save_cpu_state(ctx
, 1);
13111 gen_helper_ei(t0
, cpu_env
);
13112 gen_store_gpr(t0
, rs
);
13113 /* Stop translation as we may have switched the execution mode */
13114 ctx
->bstate
= BS_STOP
;
13119 goto pool32axf_invalid
;
13129 generate_exception_end(ctx
, EXCP_SYSCALL
);
13132 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13133 gen_helper_do_semihosting(cpu_env
);
13135 check_insn(ctx
, ISA_MIPS32
);
13136 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13137 generate_exception_end(ctx
, EXCP_RI
);
13139 generate_exception_end(ctx
, EXCP_DBp
);
13144 goto pool32axf_invalid
;
13148 switch (minor
& 3) {
13150 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13153 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13156 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13159 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13162 goto pool32axf_invalid
;
13166 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13169 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13172 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13175 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13178 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13181 goto pool32axf_invalid
;
13186 MIPS_INVAL("pool32axf");
13187 generate_exception_end(ctx
, EXCP_RI
);
13192 /* Values for microMIPS fmt field. Variable-width, depending on which
13193 formats the instruction supports. */
13212 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13214 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13215 uint32_t mips32_op
;
13217 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13218 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13219 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13221 switch (extension
) {
13222 case FLOAT_1BIT_FMT(CFC1
, 0):
13223 mips32_op
= OPC_CFC1
;
13225 case FLOAT_1BIT_FMT(CTC1
, 0):
13226 mips32_op
= OPC_CTC1
;
13228 case FLOAT_1BIT_FMT(MFC1
, 0):
13229 mips32_op
= OPC_MFC1
;
13231 case FLOAT_1BIT_FMT(MTC1
, 0):
13232 mips32_op
= OPC_MTC1
;
13234 case FLOAT_1BIT_FMT(MFHC1
, 0):
13235 mips32_op
= OPC_MFHC1
;
13237 case FLOAT_1BIT_FMT(MTHC1
, 0):
13238 mips32_op
= OPC_MTHC1
;
13240 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13243 /* Reciprocal square root */
13244 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13245 mips32_op
= OPC_RSQRT_S
;
13247 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13248 mips32_op
= OPC_RSQRT_D
;
13252 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13253 mips32_op
= OPC_SQRT_S
;
13255 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13256 mips32_op
= OPC_SQRT_D
;
13260 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13261 mips32_op
= OPC_RECIP_S
;
13263 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13264 mips32_op
= OPC_RECIP_D
;
13268 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13269 mips32_op
= OPC_FLOOR_L_S
;
13271 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13272 mips32_op
= OPC_FLOOR_L_D
;
13274 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13275 mips32_op
= OPC_FLOOR_W_S
;
13277 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13278 mips32_op
= OPC_FLOOR_W_D
;
13282 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13283 mips32_op
= OPC_CEIL_L_S
;
13285 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13286 mips32_op
= OPC_CEIL_L_D
;
13288 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13289 mips32_op
= OPC_CEIL_W_S
;
13291 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13292 mips32_op
= OPC_CEIL_W_D
;
13296 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13297 mips32_op
= OPC_TRUNC_L_S
;
13299 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13300 mips32_op
= OPC_TRUNC_L_D
;
13302 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13303 mips32_op
= OPC_TRUNC_W_S
;
13305 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13306 mips32_op
= OPC_TRUNC_W_D
;
13310 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13311 mips32_op
= OPC_ROUND_L_S
;
13313 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13314 mips32_op
= OPC_ROUND_L_D
;
13316 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13317 mips32_op
= OPC_ROUND_W_S
;
13319 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13320 mips32_op
= OPC_ROUND_W_D
;
13323 /* Integer to floating-point conversion */
13324 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13325 mips32_op
= OPC_CVT_L_S
;
13327 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13328 mips32_op
= OPC_CVT_L_D
;
13330 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13331 mips32_op
= OPC_CVT_W_S
;
13333 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13334 mips32_op
= OPC_CVT_W_D
;
13337 /* Paired-foo conversions */
13338 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13339 mips32_op
= OPC_CVT_S_PL
;
13341 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13342 mips32_op
= OPC_CVT_S_PU
;
13344 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13345 mips32_op
= OPC_CVT_PW_PS
;
13347 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13348 mips32_op
= OPC_CVT_PS_PW
;
13351 /* Floating-point moves */
13352 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13353 mips32_op
= OPC_MOV_S
;
13355 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13356 mips32_op
= OPC_MOV_D
;
13358 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13359 mips32_op
= OPC_MOV_PS
;
13362 /* Absolute value */
13363 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13364 mips32_op
= OPC_ABS_S
;
13366 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13367 mips32_op
= OPC_ABS_D
;
13369 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13370 mips32_op
= OPC_ABS_PS
;
13374 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13375 mips32_op
= OPC_NEG_S
;
13377 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13378 mips32_op
= OPC_NEG_D
;
13380 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13381 mips32_op
= OPC_NEG_PS
;
13384 /* Reciprocal square root step */
13385 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
13386 mips32_op
= OPC_RSQRT1_S
;
13388 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
13389 mips32_op
= OPC_RSQRT1_D
;
13391 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
13392 mips32_op
= OPC_RSQRT1_PS
;
13395 /* Reciprocal step */
13396 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
13397 mips32_op
= OPC_RECIP1_S
;
13399 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
13400 mips32_op
= OPC_RECIP1_S
;
13402 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
13403 mips32_op
= OPC_RECIP1_PS
;
13406 /* Conversions from double */
13407 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
13408 mips32_op
= OPC_CVT_D_S
;
13410 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
13411 mips32_op
= OPC_CVT_D_W
;
13413 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
13414 mips32_op
= OPC_CVT_D_L
;
13417 /* Conversions from single */
13418 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
13419 mips32_op
= OPC_CVT_S_D
;
13421 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
13422 mips32_op
= OPC_CVT_S_W
;
13424 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
13425 mips32_op
= OPC_CVT_S_L
;
13427 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
13430 /* Conditional moves on floating-point codes */
13431 case COND_FLOAT_MOV(MOVT
, 0):
13432 case COND_FLOAT_MOV(MOVT
, 1):
13433 case COND_FLOAT_MOV(MOVT
, 2):
13434 case COND_FLOAT_MOV(MOVT
, 3):
13435 case COND_FLOAT_MOV(MOVT
, 4):
13436 case COND_FLOAT_MOV(MOVT
, 5):
13437 case COND_FLOAT_MOV(MOVT
, 6):
13438 case COND_FLOAT_MOV(MOVT
, 7):
13439 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13440 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
13442 case COND_FLOAT_MOV(MOVF
, 0):
13443 case COND_FLOAT_MOV(MOVF
, 1):
13444 case COND_FLOAT_MOV(MOVF
, 2):
13445 case COND_FLOAT_MOV(MOVF
, 3):
13446 case COND_FLOAT_MOV(MOVF
, 4):
13447 case COND_FLOAT_MOV(MOVF
, 5):
13448 case COND_FLOAT_MOV(MOVF
, 6):
13449 case COND_FLOAT_MOV(MOVF
, 7):
13450 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13451 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
13454 MIPS_INVAL("pool32fxf");
13455 generate_exception_end(ctx
, EXCP_RI
);
13460 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13464 int rt
, rs
, rd
, rr
;
13466 uint32_t op
, minor
, mips32_op
;
13467 uint32_t cond
, fmt
, cc
;
13469 insn
= cpu_lduw_code(env
, ctx
->pc
+ 2);
13470 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
13472 rt
= (ctx
->opcode
>> 21) & 0x1f;
13473 rs
= (ctx
->opcode
>> 16) & 0x1f;
13474 rd
= (ctx
->opcode
>> 11) & 0x1f;
13475 rr
= (ctx
->opcode
>> 6) & 0x1f;
13476 imm
= (int16_t) ctx
->opcode
;
13478 op
= (ctx
->opcode
>> 26) & 0x3f;
13481 minor
= ctx
->opcode
& 0x3f;
13484 minor
= (ctx
->opcode
>> 6) & 0xf;
13487 mips32_op
= OPC_SLL
;
13490 mips32_op
= OPC_SRA
;
13493 mips32_op
= OPC_SRL
;
13496 mips32_op
= OPC_ROTR
;
13498 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
13501 check_insn(ctx
, ISA_MIPS32R6
);
13502 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
13505 check_insn(ctx
, ISA_MIPS32R6
);
13506 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
13509 check_insn(ctx
, ISA_MIPS32R6
);
13510 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
13513 goto pool32a_invalid
;
13517 minor
= (ctx
->opcode
>> 6) & 0xf;
13521 mips32_op
= OPC_ADD
;
13524 mips32_op
= OPC_ADDU
;
13527 mips32_op
= OPC_SUB
;
13530 mips32_op
= OPC_SUBU
;
13533 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13534 mips32_op
= OPC_MUL
;
13536 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
13540 mips32_op
= OPC_SLLV
;
13543 mips32_op
= OPC_SRLV
;
13546 mips32_op
= OPC_SRAV
;
13549 mips32_op
= OPC_ROTRV
;
13551 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
13553 /* Logical operations */
13555 mips32_op
= OPC_AND
;
13558 mips32_op
= OPC_OR
;
13561 mips32_op
= OPC_NOR
;
13564 mips32_op
= OPC_XOR
;
13566 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
13568 /* Set less than */
13570 mips32_op
= OPC_SLT
;
13573 mips32_op
= OPC_SLTU
;
13575 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
13578 goto pool32a_invalid
;
13582 minor
= (ctx
->opcode
>> 6) & 0xf;
13584 /* Conditional moves */
13585 case MOVN
: /* MUL */
13586 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13588 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
13591 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
13594 case MOVZ
: /* MUH */
13595 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13597 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
13600 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
13604 check_insn(ctx
, ISA_MIPS32R6
);
13605 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
13608 check_insn(ctx
, ISA_MIPS32R6
);
13609 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
13611 case LWXS
: /* DIV */
13612 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13614 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
13617 gen_ldxs(ctx
, rs
, rt
, rd
);
13621 check_insn(ctx
, ISA_MIPS32R6
);
13622 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
13625 check_insn(ctx
, ISA_MIPS32R6
);
13626 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
13629 check_insn(ctx
, ISA_MIPS32R6
);
13630 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
13633 goto pool32a_invalid
;
13637 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
13640 check_insn(ctx
, ISA_MIPS32R6
);
13641 gen_lsa(ctx
, OPC_LSA
, rd
, rs
, rt
,
13642 extract32(ctx
->opcode
, 9, 2));
13645 check_insn(ctx
, ISA_MIPS32R6
);
13646 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
,
13647 extract32(ctx
->opcode
, 9, 2));
13650 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
13653 gen_pool32axf(env
, ctx
, rt
, rs
);
13656 generate_exception_end(ctx
, EXCP_BREAK
);
13660 MIPS_INVAL("pool32a");
13661 generate_exception_end(ctx
, EXCP_RI
);
13666 minor
= (ctx
->opcode
>> 12) & 0xf;
13669 check_cp0_enabled(ctx
);
13670 /* Treat as no-op. */
13674 /* COP2: Not implemented. */
13675 generate_exception_err(ctx
, EXCP_CpU
, 2);
13677 #ifdef TARGET_MIPS64
13680 check_insn(ctx
, ISA_MIPS3
);
13681 check_mips_64(ctx
);
13686 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13688 #ifdef TARGET_MIPS64
13691 check_insn(ctx
, ISA_MIPS3
);
13692 check_mips_64(ctx
);
13697 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
13700 MIPS_INVAL("pool32b");
13701 generate_exception_end(ctx
, EXCP_RI
);
13706 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
13707 minor
= ctx
->opcode
& 0x3f;
13708 check_cp1_enabled(ctx
);
13711 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13712 mips32_op
= OPC_ALNV_PS
;
13715 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13716 mips32_op
= OPC_MADD_S
;
13719 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13720 mips32_op
= OPC_MADD_D
;
13723 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13724 mips32_op
= OPC_MADD_PS
;
13727 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13728 mips32_op
= OPC_MSUB_S
;
13731 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13732 mips32_op
= OPC_MSUB_D
;
13735 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13736 mips32_op
= OPC_MSUB_PS
;
13739 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13740 mips32_op
= OPC_NMADD_S
;
13743 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13744 mips32_op
= OPC_NMADD_D
;
13747 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13748 mips32_op
= OPC_NMADD_PS
;
13751 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13752 mips32_op
= OPC_NMSUB_S
;
13755 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13756 mips32_op
= OPC_NMSUB_D
;
13759 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13760 mips32_op
= OPC_NMSUB_PS
;
13762 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
13764 case CABS_COND_FMT
:
13765 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13766 cond
= (ctx
->opcode
>> 6) & 0xf;
13767 cc
= (ctx
->opcode
>> 13) & 0x7;
13768 fmt
= (ctx
->opcode
>> 10) & 0x3;
13771 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
13774 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
13777 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
13780 goto pool32f_invalid
;
13784 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13785 cond
= (ctx
->opcode
>> 6) & 0xf;
13786 cc
= (ctx
->opcode
>> 13) & 0x7;
13787 fmt
= (ctx
->opcode
>> 10) & 0x3;
13790 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
13793 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
13796 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
13799 goto pool32f_invalid
;
13803 check_insn(ctx
, ISA_MIPS32R6
);
13804 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13807 check_insn(ctx
, ISA_MIPS32R6
);
13808 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
13811 gen_pool32fxf(ctx
, rt
, rs
);
13815 switch ((ctx
->opcode
>> 6) & 0x7) {
13817 mips32_op
= OPC_PLL_PS
;
13820 mips32_op
= OPC_PLU_PS
;
13823 mips32_op
= OPC_PUL_PS
;
13826 mips32_op
= OPC_PUU_PS
;
13829 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13830 mips32_op
= OPC_CVT_PS_S
;
13832 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13835 goto pool32f_invalid
;
13839 check_insn(ctx
, ISA_MIPS32R6
);
13840 switch ((ctx
->opcode
>> 9) & 0x3) {
13842 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
13845 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
13848 goto pool32f_invalid
;
13853 switch ((ctx
->opcode
>> 6) & 0x7) {
13855 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13856 mips32_op
= OPC_LWXC1
;
13859 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13860 mips32_op
= OPC_SWXC1
;
13863 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13864 mips32_op
= OPC_LDXC1
;
13867 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13868 mips32_op
= OPC_SDXC1
;
13871 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13872 mips32_op
= OPC_LUXC1
;
13875 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13876 mips32_op
= OPC_SUXC1
;
13878 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
13881 goto pool32f_invalid
;
13885 check_insn(ctx
, ISA_MIPS32R6
);
13886 switch ((ctx
->opcode
>> 9) & 0x3) {
13888 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
13891 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
13894 goto pool32f_invalid
;
13899 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13900 fmt
= (ctx
->opcode
>> 9) & 0x3;
13901 switch ((ctx
->opcode
>> 6) & 0x7) {
13905 mips32_op
= OPC_RSQRT2_S
;
13908 mips32_op
= OPC_RSQRT2_D
;
13911 mips32_op
= OPC_RSQRT2_PS
;
13914 goto pool32f_invalid
;
13920 mips32_op
= OPC_RECIP2_S
;
13923 mips32_op
= OPC_RECIP2_D
;
13926 mips32_op
= OPC_RECIP2_PS
;
13929 goto pool32f_invalid
;
13933 mips32_op
= OPC_ADDR_PS
;
13936 mips32_op
= OPC_MULR_PS
;
13938 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
13941 goto pool32f_invalid
;
13945 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
13946 cc
= (ctx
->opcode
>> 13) & 0x7;
13947 fmt
= (ctx
->opcode
>> 9) & 0x3;
13948 switch ((ctx
->opcode
>> 6) & 0x7) {
13949 case MOVF_FMT
: /* RINT_FMT */
13950 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13954 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
13957 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
13960 goto pool32f_invalid
;
13966 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
13969 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
13973 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
13976 goto pool32f_invalid
;
13980 case MOVT_FMT
: /* CLASS_FMT */
13981 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13985 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
13988 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
13991 goto pool32f_invalid
;
13997 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
14000 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
14004 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
14007 goto pool32f_invalid
;
14012 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14015 goto pool32f_invalid
;
14018 #define FINSN_3ARG_SDPS(prfx) \
14019 switch ((ctx->opcode >> 8) & 0x3) { \
14021 mips32_op = OPC_##prfx##_S; \
14024 mips32_op = OPC_##prfx##_D; \
14026 case FMT_SDPS_PS: \
14028 mips32_op = OPC_##prfx##_PS; \
14031 goto pool32f_invalid; \
14034 check_insn(ctx
, ISA_MIPS32R6
);
14035 switch ((ctx
->opcode
>> 9) & 0x3) {
14037 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
14040 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
14043 goto pool32f_invalid
;
14047 check_insn(ctx
, ISA_MIPS32R6
);
14048 switch ((ctx
->opcode
>> 9) & 0x3) {
14050 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
14053 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
14056 goto pool32f_invalid
;
14060 /* regular FP ops */
14061 switch ((ctx
->opcode
>> 6) & 0x3) {
14063 FINSN_3ARG_SDPS(ADD
);
14066 FINSN_3ARG_SDPS(SUB
);
14069 FINSN_3ARG_SDPS(MUL
);
14072 fmt
= (ctx
->opcode
>> 8) & 0x3;
14074 mips32_op
= OPC_DIV_D
;
14075 } else if (fmt
== 0) {
14076 mips32_op
= OPC_DIV_S
;
14078 goto pool32f_invalid
;
14082 goto pool32f_invalid
;
14087 switch ((ctx
->opcode
>> 6) & 0x7) {
14088 case MOVN_FMT
: /* SELNEZ_FMT */
14089 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14091 switch ((ctx
->opcode
>> 9) & 0x3) {
14093 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
14096 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
14099 goto pool32f_invalid
;
14103 FINSN_3ARG_SDPS(MOVN
);
14107 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14108 FINSN_3ARG_SDPS(MOVN
);
14110 case MOVZ_FMT
: /* SELEQZ_FMT */
14111 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14113 switch ((ctx
->opcode
>> 9) & 0x3) {
14115 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
14118 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
14121 goto pool32f_invalid
;
14125 FINSN_3ARG_SDPS(MOVZ
);
14129 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14130 FINSN_3ARG_SDPS(MOVZ
);
14133 check_insn(ctx
, ISA_MIPS32R6
);
14134 switch ((ctx
->opcode
>> 9) & 0x3) {
14136 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
14139 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
14142 goto pool32f_invalid
;
14146 check_insn(ctx
, ISA_MIPS32R6
);
14147 switch ((ctx
->opcode
>> 9) & 0x3) {
14149 mips32_op
= OPC_MADDF_S
;
14152 mips32_op
= OPC_MADDF_D
;
14155 goto pool32f_invalid
;
14159 check_insn(ctx
, ISA_MIPS32R6
);
14160 switch ((ctx
->opcode
>> 9) & 0x3) {
14162 mips32_op
= OPC_MSUBF_S
;
14165 mips32_op
= OPC_MSUBF_D
;
14168 goto pool32f_invalid
;
14172 goto pool32f_invalid
;
14176 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
14180 MIPS_INVAL("pool32f");
14181 generate_exception_end(ctx
, EXCP_RI
);
14185 generate_exception_err(ctx
, EXCP_CpU
, 1);
14189 minor
= (ctx
->opcode
>> 21) & 0x1f;
14192 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14193 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14196 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14197 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14198 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14201 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14202 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14203 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14206 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14207 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14210 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14211 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14212 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14215 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14216 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14217 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14220 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14221 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14224 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14225 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14229 case TLTI
: /* BC1EQZC */
14230 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14232 check_cp1_enabled(ctx
);
14233 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14236 mips32_op
= OPC_TLTI
;
14240 case TGEI
: /* BC1NEZC */
14241 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14243 check_cp1_enabled(ctx
);
14244 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14247 mips32_op
= OPC_TGEI
;
14252 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14253 mips32_op
= OPC_TLTIU
;
14256 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14257 mips32_op
= OPC_TGEIU
;
14259 case TNEI
: /* SYNCI */
14260 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14262 /* Break the TB to be able to sync copied instructions
14264 ctx
->bstate
= BS_STOP
;
14267 mips32_op
= OPC_TNEI
;
14272 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14273 mips32_op
= OPC_TEQI
;
14275 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14280 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14281 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14282 4, rs
, 0, imm
<< 1, 0);
14283 /* Compact branches don't have a delay slot, so just let
14284 the normal delay slot handling take us to the branch
14288 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14289 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14292 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14293 /* Break the TB to be able to sync copied instructions
14295 ctx
->bstate
= BS_STOP
;
14299 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14300 /* COP2: Not implemented. */
14301 generate_exception_err(ctx
, EXCP_CpU
, 2);
14304 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14305 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14308 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14309 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14312 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14313 mips32_op
= OPC_BC1FANY4
;
14316 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14317 mips32_op
= OPC_BC1TANY4
;
14320 check_insn(ctx
, ASE_MIPS3D
);
14323 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14324 check_cp1_enabled(ctx
);
14325 gen_compute_branch1(ctx
, mips32_op
,
14326 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14328 generate_exception_err(ctx
, EXCP_CpU
, 1);
14333 /* MIPS DSP: not implemented */
14336 MIPS_INVAL("pool32i");
14337 generate_exception_end(ctx
, EXCP_RI
);
14342 minor
= (ctx
->opcode
>> 12) & 0xf;
14343 offset
= sextract32(ctx
->opcode
, 0,
14344 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14347 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14348 mips32_op
= OPC_LWL
;
14351 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14352 mips32_op
= OPC_SWL
;
14355 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14356 mips32_op
= OPC_LWR
;
14359 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14360 mips32_op
= OPC_SWR
;
14362 #if defined(TARGET_MIPS64)
14364 check_insn(ctx
, ISA_MIPS3
);
14365 check_mips_64(ctx
);
14366 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14367 mips32_op
= OPC_LDL
;
14370 check_insn(ctx
, ISA_MIPS3
);
14371 check_mips_64(ctx
);
14372 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14373 mips32_op
= OPC_SDL
;
14376 check_insn(ctx
, ISA_MIPS3
);
14377 check_mips_64(ctx
);
14378 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14379 mips32_op
= OPC_LDR
;
14382 check_insn(ctx
, ISA_MIPS3
);
14383 check_mips_64(ctx
);
14384 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14385 mips32_op
= OPC_SDR
;
14388 check_insn(ctx
, ISA_MIPS3
);
14389 check_mips_64(ctx
);
14390 mips32_op
= OPC_LWU
;
14393 check_insn(ctx
, ISA_MIPS3
);
14394 check_mips_64(ctx
);
14395 mips32_op
= OPC_LLD
;
14399 mips32_op
= OPC_LL
;
14402 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14405 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14408 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14410 #if defined(TARGET_MIPS64)
14412 check_insn(ctx
, ISA_MIPS3
);
14413 check_mips_64(ctx
);
14414 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14418 /* Treat as no-op */
14419 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14420 /* hint codes 24-31 are reserved and signal RI */
14421 generate_exception(ctx
, EXCP_RI
);
14425 MIPS_INVAL("pool32c");
14426 generate_exception_end(ctx
, EXCP_RI
);
14430 case ADDI32
: /* AUI, LUI */
14431 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14433 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14436 mips32_op
= OPC_ADDI
;
14441 mips32_op
= OPC_ADDIU
;
14443 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14446 /* Logical operations */
14448 mips32_op
= OPC_ORI
;
14451 mips32_op
= OPC_XORI
;
14454 mips32_op
= OPC_ANDI
;
14456 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14459 /* Set less than immediate */
14461 mips32_op
= OPC_SLTI
;
14464 mips32_op
= OPC_SLTIU
;
14466 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14469 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14470 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14471 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14472 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14474 case JALS32
: /* BOVC, BEQC, BEQZALC */
14475 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14478 mips32_op
= OPC_BOVC
;
14479 } else if (rs
< rt
&& rs
== 0) {
14481 mips32_op
= OPC_BEQZALC
;
14484 mips32_op
= OPC_BEQC
;
14486 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14489 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14490 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14491 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14494 case BEQ32
: /* BC */
14495 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14497 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14498 sextract32(ctx
->opcode
<< 1, 0, 27));
14501 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14504 case BNE32
: /* BALC */
14505 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14507 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14508 sextract32(ctx
->opcode
<< 1, 0, 27));
14511 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14514 case J32
: /* BGTZC, BLTZC, BLTC */
14515 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14516 if (rs
== 0 && rt
!= 0) {
14518 mips32_op
= OPC_BGTZC
;
14519 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14521 mips32_op
= OPC_BLTZC
;
14524 mips32_op
= OPC_BLTC
;
14526 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14529 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14530 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14533 case JAL32
: /* BLEZC, BGEZC, BGEC */
14534 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14535 if (rs
== 0 && rt
!= 0) {
14537 mips32_op
= OPC_BLEZC
;
14538 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14540 mips32_op
= OPC_BGEZC
;
14543 mips32_op
= OPC_BGEC
;
14545 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14548 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14549 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14550 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14553 /* Floating point (COP1) */
14555 mips32_op
= OPC_LWC1
;
14558 mips32_op
= OPC_LDC1
;
14561 mips32_op
= OPC_SWC1
;
14564 mips32_op
= OPC_SDC1
;
14566 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14568 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14569 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14570 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14571 switch ((ctx
->opcode
>> 16) & 0x1f) {
14572 case ADDIUPC_00
... ADDIUPC_07
:
14573 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14576 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14579 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14581 case LWPC_08
... LWPC_0F
:
14582 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14585 generate_exception(ctx
, EXCP_RI
);
14590 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14591 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14593 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14596 case BNVC
: /* BNEC, BNEZALC */
14597 check_insn(ctx
, ISA_MIPS32R6
);
14600 mips32_op
= OPC_BNVC
;
14601 } else if (rs
< rt
&& rs
== 0) {
14603 mips32_op
= OPC_BNEZALC
;
14606 mips32_op
= OPC_BNEC
;
14608 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14610 case R6_BNEZC
: /* JIALC */
14611 check_insn(ctx
, ISA_MIPS32R6
);
14614 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14615 sextract32(ctx
->opcode
<< 1, 0, 22));
14618 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14621 case R6_BEQZC
: /* JIC */
14622 check_insn(ctx
, ISA_MIPS32R6
);
14625 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14626 sextract32(ctx
->opcode
<< 1, 0, 22));
14629 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14632 case BLEZALC
: /* BGEZALC, BGEUC */
14633 check_insn(ctx
, ISA_MIPS32R6
);
14634 if (rs
== 0 && rt
!= 0) {
14636 mips32_op
= OPC_BLEZALC
;
14637 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14639 mips32_op
= OPC_BGEZALC
;
14642 mips32_op
= OPC_BGEUC
;
14644 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14646 case BGTZALC
: /* BLTZALC, BLTUC */
14647 check_insn(ctx
, ISA_MIPS32R6
);
14648 if (rs
== 0 && rt
!= 0) {
14650 mips32_op
= OPC_BGTZALC
;
14651 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14653 mips32_op
= OPC_BLTZALC
;
14656 mips32_op
= OPC_BLTUC
;
14658 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14660 /* Loads and stores */
14662 mips32_op
= OPC_LB
;
14665 mips32_op
= OPC_LBU
;
14668 mips32_op
= OPC_LH
;
14671 mips32_op
= OPC_LHU
;
14674 mips32_op
= OPC_LW
;
14676 #ifdef TARGET_MIPS64
14678 check_insn(ctx
, ISA_MIPS3
);
14679 check_mips_64(ctx
);
14680 mips32_op
= OPC_LD
;
14683 check_insn(ctx
, ISA_MIPS3
);
14684 check_mips_64(ctx
);
14685 mips32_op
= OPC_SD
;
14689 mips32_op
= OPC_SB
;
14692 mips32_op
= OPC_SH
;
14695 mips32_op
= OPC_SW
;
14698 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14701 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14704 generate_exception_end(ctx
, EXCP_RI
);
14709 static int decode_micromips_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
14713 /* make sure instructions are on a halfword boundary */
14714 if (ctx
->pc
& 0x1) {
14715 env
->CP0_BadVAddr
= ctx
->pc
;
14716 generate_exception_end(ctx
, EXCP_AdEL
);
14720 op
= (ctx
->opcode
>> 10) & 0x3f;
14721 /* Enforce properly-sized instructions in a delay slot */
14722 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
14723 switch (op
& 0x7) { /* MSB-3..MSB-5 */
14725 /* POOL32A, POOL32B, POOL32I, POOL32C */
14727 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
14729 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
14731 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
14733 /* LB32, LH32, LWC132, LDC132, LW32 */
14734 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
14735 generate_exception_end(ctx
, EXCP_RI
);
14740 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
14742 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
14744 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
14745 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
14746 generate_exception_end(ctx
, EXCP_RI
);
14756 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14757 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
14758 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
14761 switch (ctx
->opcode
& 0x1) {
14769 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14770 /* In the Release 6 the register number location in
14771 * the instruction encoding has changed.
14773 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
14775 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
14781 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14782 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
14783 int amount
= (ctx
->opcode
>> 1) & 0x7;
14785 amount
= amount
== 0 ? 8 : amount
;
14787 switch (ctx
->opcode
& 0x1) {
14796 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
14800 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14801 gen_pool16c_r6_insn(ctx
);
14803 gen_pool16c_insn(ctx
);
14808 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14809 int rb
= 28; /* GP */
14810 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
14812 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14816 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14817 if (ctx
->opcode
& 1) {
14818 generate_exception_end(ctx
, EXCP_RI
);
14821 int enc_dest
= uMIPS_RD(ctx
->opcode
);
14822 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
14823 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
14824 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
14829 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14830 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14831 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14832 offset
= (offset
== 0xf ? -1 : offset
);
14834 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
14839 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14840 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14841 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14843 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
14848 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14849 int rb
= 29; /* SP */
14850 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14852 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14857 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
14858 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14859 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14861 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
14866 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14867 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14868 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
14870 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
14875 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14876 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14877 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
14879 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
14884 int rd
= (ctx
->opcode
>> 5) & 0x1f;
14885 int rb
= 29; /* SP */
14886 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
14888 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14893 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
14894 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
14895 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
14897 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
14902 int rd
= uMIPS_RD5(ctx
->opcode
);
14903 int rs
= uMIPS_RS5(ctx
->opcode
);
14905 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
14912 switch (ctx
->opcode
& 0x1) {
14922 switch (ctx
->opcode
& 0x1) {
14927 gen_addiur1sp(ctx
);
14931 case B16
: /* BC16 */
14932 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
14933 sextract32(ctx
->opcode
, 0, 10) << 1,
14934 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14936 case BNEZ16
: /* BNEZC16 */
14937 case BEQZ16
: /* BEQZC16 */
14938 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
14939 mmreg(uMIPS_RD(ctx
->opcode
)),
14940 0, sextract32(ctx
->opcode
, 0, 7) << 1,
14941 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
14946 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
14947 int imm
= ZIMM(ctx
->opcode
, 0, 7);
14949 imm
= (imm
== 0x7f ? -1 : imm
);
14950 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
14956 generate_exception_end(ctx
, EXCP_RI
);
14959 decode_micromips32_opc(env
, ctx
);
14966 /* SmartMIPS extension to MIPS32 */
14968 #if defined(TARGET_MIPS64)
14970 /* MDMX extension to MIPS64 */
14974 /* MIPSDSP functions. */
14975 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
14976 int rd
, int base
, int offset
)
14981 t0
= tcg_temp_new();
14984 gen_load_gpr(t0
, offset
);
14985 } else if (offset
== 0) {
14986 gen_load_gpr(t0
, base
);
14988 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
14993 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
14994 gen_store_gpr(t0
, rd
);
14997 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
14998 gen_store_gpr(t0
, rd
);
15001 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
15002 gen_store_gpr(t0
, rd
);
15004 #if defined(TARGET_MIPS64)
15006 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
15007 gen_store_gpr(t0
, rd
);
15014 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15015 int ret
, int v1
, int v2
)
15021 /* Treat as NOP. */
15025 v1_t
= tcg_temp_new();
15026 v2_t
= tcg_temp_new();
15028 gen_load_gpr(v1_t
, v1
);
15029 gen_load_gpr(v2_t
, v2
);
15032 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15033 case OPC_MULT_G_2E
:
15037 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15039 case OPC_ADDUH_R_QB
:
15040 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15043 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15045 case OPC_ADDQH_R_PH
:
15046 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15049 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15051 case OPC_ADDQH_R_W
:
15052 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15055 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15057 case OPC_SUBUH_R_QB
:
15058 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15061 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15063 case OPC_SUBQH_R_PH
:
15064 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15067 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15069 case OPC_SUBQH_R_W
:
15070 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15074 case OPC_ABSQ_S_PH_DSP
:
15076 case OPC_ABSQ_S_QB
:
15078 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15080 case OPC_ABSQ_S_PH
:
15082 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15086 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15088 case OPC_PRECEQ_W_PHL
:
15090 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15091 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15093 case OPC_PRECEQ_W_PHR
:
15095 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15096 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15097 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15099 case OPC_PRECEQU_PH_QBL
:
15101 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15103 case OPC_PRECEQU_PH_QBR
:
15105 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15107 case OPC_PRECEQU_PH_QBLA
:
15109 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15111 case OPC_PRECEQU_PH_QBRA
:
15113 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15115 case OPC_PRECEU_PH_QBL
:
15117 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15119 case OPC_PRECEU_PH_QBR
:
15121 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15123 case OPC_PRECEU_PH_QBLA
:
15125 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15127 case OPC_PRECEU_PH_QBRA
:
15129 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15133 case OPC_ADDU_QB_DSP
:
15137 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15139 case OPC_ADDQ_S_PH
:
15141 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15145 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15149 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15151 case OPC_ADDU_S_QB
:
15153 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15157 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15159 case OPC_ADDU_S_PH
:
15161 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15165 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15167 case OPC_SUBQ_S_PH
:
15169 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15173 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15177 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15179 case OPC_SUBU_S_QB
:
15181 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15185 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15187 case OPC_SUBU_S_PH
:
15189 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15193 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15197 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15201 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15203 case OPC_RADDU_W_QB
:
15205 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15209 case OPC_CMPU_EQ_QB_DSP
:
15211 case OPC_PRECR_QB_PH
:
15213 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15215 case OPC_PRECRQ_QB_PH
:
15217 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15219 case OPC_PRECR_SRA_PH_W
:
15222 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15223 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15225 tcg_temp_free_i32(sa_t
);
15228 case OPC_PRECR_SRA_R_PH_W
:
15231 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15232 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15234 tcg_temp_free_i32(sa_t
);
15237 case OPC_PRECRQ_PH_W
:
15239 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15241 case OPC_PRECRQ_RS_PH_W
:
15243 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15245 case OPC_PRECRQU_S_QB_PH
:
15247 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15251 #ifdef TARGET_MIPS64
15252 case OPC_ABSQ_S_QH_DSP
:
15254 case OPC_PRECEQ_L_PWL
:
15256 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15258 case OPC_PRECEQ_L_PWR
:
15260 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15262 case OPC_PRECEQ_PW_QHL
:
15264 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15266 case OPC_PRECEQ_PW_QHR
:
15268 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15270 case OPC_PRECEQ_PW_QHLA
:
15272 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15274 case OPC_PRECEQ_PW_QHRA
:
15276 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15278 case OPC_PRECEQU_QH_OBL
:
15280 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15282 case OPC_PRECEQU_QH_OBR
:
15284 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15286 case OPC_PRECEQU_QH_OBLA
:
15288 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15290 case OPC_PRECEQU_QH_OBRA
:
15292 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15294 case OPC_PRECEU_QH_OBL
:
15296 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15298 case OPC_PRECEU_QH_OBR
:
15300 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15302 case OPC_PRECEU_QH_OBLA
:
15304 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15306 case OPC_PRECEU_QH_OBRA
:
15308 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15310 case OPC_ABSQ_S_OB
:
15312 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15314 case OPC_ABSQ_S_PW
:
15316 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15318 case OPC_ABSQ_S_QH
:
15320 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15324 case OPC_ADDU_OB_DSP
:
15326 case OPC_RADDU_L_OB
:
15328 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15332 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15334 case OPC_SUBQ_S_PW
:
15336 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15340 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15342 case OPC_SUBQ_S_QH
:
15344 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15348 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15350 case OPC_SUBU_S_OB
:
15352 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15356 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15358 case OPC_SUBU_S_QH
:
15360 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15364 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15366 case OPC_SUBUH_R_OB
:
15368 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15372 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15374 case OPC_ADDQ_S_PW
:
15376 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15380 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15382 case OPC_ADDQ_S_QH
:
15384 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15388 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15390 case OPC_ADDU_S_OB
:
15392 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15396 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15398 case OPC_ADDU_S_QH
:
15400 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15404 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15406 case OPC_ADDUH_R_OB
:
15408 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15412 case OPC_CMPU_EQ_OB_DSP
:
15414 case OPC_PRECR_OB_QH
:
15416 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15418 case OPC_PRECR_SRA_QH_PW
:
15421 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15422 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15423 tcg_temp_free_i32(ret_t
);
15426 case OPC_PRECR_SRA_R_QH_PW
:
15429 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15430 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15431 tcg_temp_free_i32(sa_v
);
15434 case OPC_PRECRQ_OB_QH
:
15436 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15438 case OPC_PRECRQ_PW_L
:
15440 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15442 case OPC_PRECRQ_QH_PW
:
15444 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15446 case OPC_PRECRQ_RS_QH_PW
:
15448 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15450 case OPC_PRECRQU_S_OB_QH
:
15452 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15459 tcg_temp_free(v1_t
);
15460 tcg_temp_free(v2_t
);
15463 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15464 int ret
, int v1
, int v2
)
15472 /* Treat as NOP. */
15476 t0
= tcg_temp_new();
15477 v1_t
= tcg_temp_new();
15478 v2_t
= tcg_temp_new();
15480 tcg_gen_movi_tl(t0
, v1
);
15481 gen_load_gpr(v1_t
, v1
);
15482 gen_load_gpr(v2_t
, v2
);
15485 case OPC_SHLL_QB_DSP
:
15487 op2
= MASK_SHLL_QB(ctx
->opcode
);
15491 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15495 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15499 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15503 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15505 case OPC_SHLL_S_PH
:
15507 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15509 case OPC_SHLLV_S_PH
:
15511 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15515 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15517 case OPC_SHLLV_S_W
:
15519 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15523 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15527 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15531 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15535 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15539 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15541 case OPC_SHRA_R_QB
:
15543 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15547 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15549 case OPC_SHRAV_R_QB
:
15551 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15555 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15557 case OPC_SHRA_R_PH
:
15559 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15563 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15565 case OPC_SHRAV_R_PH
:
15567 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15571 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15573 case OPC_SHRAV_R_W
:
15575 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15577 default: /* Invalid */
15578 MIPS_INVAL("MASK SHLL.QB");
15579 generate_exception_end(ctx
, EXCP_RI
);
15584 #ifdef TARGET_MIPS64
15585 case OPC_SHLL_OB_DSP
:
15586 op2
= MASK_SHLL_OB(ctx
->opcode
);
15590 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15594 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15596 case OPC_SHLL_S_PW
:
15598 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15600 case OPC_SHLLV_S_PW
:
15602 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15606 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15610 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15614 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15618 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15620 case OPC_SHLL_S_QH
:
15622 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15624 case OPC_SHLLV_S_QH
:
15626 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15630 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15634 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15636 case OPC_SHRA_R_OB
:
15638 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15640 case OPC_SHRAV_R_OB
:
15642 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15646 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15650 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15652 case OPC_SHRA_R_PW
:
15654 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15656 case OPC_SHRAV_R_PW
:
15658 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15662 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15666 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15668 case OPC_SHRA_R_QH
:
15670 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15672 case OPC_SHRAV_R_QH
:
15674 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15678 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15682 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15686 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15690 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15692 default: /* Invalid */
15693 MIPS_INVAL("MASK SHLL.OB");
15694 generate_exception_end(ctx
, EXCP_RI
);
15702 tcg_temp_free(v1_t
);
15703 tcg_temp_free(v2_t
);
15706 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15707 int ret
, int v1
, int v2
, int check_ret
)
15713 if ((ret
== 0) && (check_ret
== 1)) {
15714 /* Treat as NOP. */
15718 t0
= tcg_temp_new_i32();
15719 v1_t
= tcg_temp_new();
15720 v2_t
= tcg_temp_new();
15722 tcg_gen_movi_i32(t0
, ret
);
15723 gen_load_gpr(v1_t
, v1
);
15724 gen_load_gpr(v2_t
, v2
);
15727 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
15728 * the same mask and op1. */
15729 case OPC_MULT_G_2E
:
15733 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15736 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15739 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15741 case OPC_MULQ_RS_W
:
15742 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15746 case OPC_DPA_W_PH_DSP
:
15748 case OPC_DPAU_H_QBL
:
15750 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15752 case OPC_DPAU_H_QBR
:
15754 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15756 case OPC_DPSU_H_QBL
:
15758 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
15760 case OPC_DPSU_H_QBR
:
15762 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
15766 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15768 case OPC_DPAX_W_PH
:
15770 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15772 case OPC_DPAQ_S_W_PH
:
15774 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15776 case OPC_DPAQX_S_W_PH
:
15778 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15780 case OPC_DPAQX_SA_W_PH
:
15782 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15786 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15788 case OPC_DPSX_W_PH
:
15790 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15792 case OPC_DPSQ_S_W_PH
:
15794 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15796 case OPC_DPSQX_S_W_PH
:
15798 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15800 case OPC_DPSQX_SA_W_PH
:
15802 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15804 case OPC_MULSAQ_S_W_PH
:
15806 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15808 case OPC_DPAQ_SA_L_W
:
15810 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15812 case OPC_DPSQ_SA_L_W
:
15814 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
15816 case OPC_MAQ_S_W_PHL
:
15818 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15820 case OPC_MAQ_S_W_PHR
:
15822 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15824 case OPC_MAQ_SA_W_PHL
:
15826 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
15828 case OPC_MAQ_SA_W_PHR
:
15830 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
15832 case OPC_MULSA_W_PH
:
15834 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
15838 #ifdef TARGET_MIPS64
15839 case OPC_DPAQ_W_QH_DSP
:
15841 int ac
= ret
& 0x03;
15842 tcg_gen_movi_i32(t0
, ac
);
15847 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
15851 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
15855 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
15859 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
15863 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15865 case OPC_DPAQ_S_W_QH
:
15867 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15869 case OPC_DPAQ_SA_L_PW
:
15871 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15873 case OPC_DPAU_H_OBL
:
15875 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15877 case OPC_DPAU_H_OBR
:
15879 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15883 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15885 case OPC_DPSQ_S_W_QH
:
15887 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15889 case OPC_DPSQ_SA_L_PW
:
15891 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15893 case OPC_DPSU_H_OBL
:
15895 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
15897 case OPC_DPSU_H_OBR
:
15899 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
15901 case OPC_MAQ_S_L_PWL
:
15903 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
15905 case OPC_MAQ_S_L_PWR
:
15907 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
15909 case OPC_MAQ_S_W_QHLL
:
15911 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15913 case OPC_MAQ_SA_W_QHLL
:
15915 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
15917 case OPC_MAQ_S_W_QHLR
:
15919 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15921 case OPC_MAQ_SA_W_QHLR
:
15923 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
15925 case OPC_MAQ_S_W_QHRL
:
15927 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15929 case OPC_MAQ_SA_W_QHRL
:
15931 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
15933 case OPC_MAQ_S_W_QHRR
:
15935 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15937 case OPC_MAQ_SA_W_QHRR
:
15939 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
15941 case OPC_MULSAQ_S_L_PW
:
15943 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
15945 case OPC_MULSAQ_S_W_QH
:
15947 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
15953 case OPC_ADDU_QB_DSP
:
15955 case OPC_MULEU_S_PH_QBL
:
15957 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15959 case OPC_MULEU_S_PH_QBR
:
15961 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15963 case OPC_MULQ_RS_PH
:
15965 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15967 case OPC_MULEQ_S_W_PHL
:
15969 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15971 case OPC_MULEQ_S_W_PHR
:
15973 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15975 case OPC_MULQ_S_PH
:
15977 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15981 #ifdef TARGET_MIPS64
15982 case OPC_ADDU_OB_DSP
:
15984 case OPC_MULEQ_S_PW_QHL
:
15986 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15988 case OPC_MULEQ_S_PW_QHR
:
15990 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15992 case OPC_MULEU_S_QH_OBL
:
15994 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15996 case OPC_MULEU_S_QH_OBR
:
15998 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16000 case OPC_MULQ_RS_QH
:
16002 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16009 tcg_temp_free_i32(t0
);
16010 tcg_temp_free(v1_t
);
16011 tcg_temp_free(v2_t
);
16014 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16022 /* Treat as NOP. */
16026 t0
= tcg_temp_new();
16027 val_t
= tcg_temp_new();
16028 gen_load_gpr(val_t
, val
);
16031 case OPC_ABSQ_S_PH_DSP
:
16035 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16040 target_long result
;
16041 imm
= (ctx
->opcode
>> 16) & 0xFF;
16042 result
= (uint32_t)imm
<< 24 |
16043 (uint32_t)imm
<< 16 |
16044 (uint32_t)imm
<< 8 |
16046 result
= (int32_t)result
;
16047 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16052 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16053 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16054 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16055 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16056 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16057 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16062 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16063 imm
= (int16_t)(imm
<< 6) >> 6;
16064 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16065 (target_long
)((int32_t)imm
<< 16 | \
16071 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16072 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16073 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16074 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16078 #ifdef TARGET_MIPS64
16079 case OPC_ABSQ_S_QH_DSP
:
16086 imm
= (ctx
->opcode
>> 16) & 0xFF;
16087 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16088 temp
= (temp
<< 16) | temp
;
16089 temp
= (temp
<< 32) | temp
;
16090 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16098 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16099 imm
= (int16_t)(imm
<< 6) >> 6;
16100 temp
= ((target_long
)imm
<< 32) \
16101 | ((target_long
)imm
& 0xFFFFFFFF);
16102 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16110 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16111 imm
= (int16_t)(imm
<< 6) >> 6;
16113 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16114 ((uint64_t)(uint16_t)imm
<< 32) |
16115 ((uint64_t)(uint16_t)imm
<< 16) |
16116 (uint64_t)(uint16_t)imm
;
16117 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16122 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16123 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16124 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16125 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16126 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16127 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16128 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16132 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16133 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16134 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16138 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16139 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16140 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16141 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16142 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16149 tcg_temp_free(val_t
);
16152 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16153 uint32_t op1
, uint32_t op2
,
16154 int ret
, int v1
, int v2
, int check_ret
)
16160 if ((ret
== 0) && (check_ret
== 1)) {
16161 /* Treat as NOP. */
16165 t1
= tcg_temp_new();
16166 v1_t
= tcg_temp_new();
16167 v2_t
= tcg_temp_new();
16169 gen_load_gpr(v1_t
, v1
);
16170 gen_load_gpr(v2_t
, v2
);
16173 case OPC_CMPU_EQ_QB_DSP
:
16175 case OPC_CMPU_EQ_QB
:
16177 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16179 case OPC_CMPU_LT_QB
:
16181 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16183 case OPC_CMPU_LE_QB
:
16185 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16187 case OPC_CMPGU_EQ_QB
:
16189 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16191 case OPC_CMPGU_LT_QB
:
16193 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16195 case OPC_CMPGU_LE_QB
:
16197 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16199 case OPC_CMPGDU_EQ_QB
:
16201 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16202 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16203 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16204 tcg_gen_shli_tl(t1
, t1
, 24);
16205 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16207 case OPC_CMPGDU_LT_QB
:
16209 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16210 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16211 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16212 tcg_gen_shli_tl(t1
, t1
, 24);
16213 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16215 case OPC_CMPGDU_LE_QB
:
16217 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16218 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16219 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16220 tcg_gen_shli_tl(t1
, t1
, 24);
16221 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16223 case OPC_CMP_EQ_PH
:
16225 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16227 case OPC_CMP_LT_PH
:
16229 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16231 case OPC_CMP_LE_PH
:
16233 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16237 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16241 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16243 case OPC_PACKRL_PH
:
16245 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16249 #ifdef TARGET_MIPS64
16250 case OPC_CMPU_EQ_OB_DSP
:
16252 case OPC_CMP_EQ_PW
:
16254 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16256 case OPC_CMP_LT_PW
:
16258 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16260 case OPC_CMP_LE_PW
:
16262 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16264 case OPC_CMP_EQ_QH
:
16266 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16268 case OPC_CMP_LT_QH
:
16270 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16272 case OPC_CMP_LE_QH
:
16274 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16276 case OPC_CMPGDU_EQ_OB
:
16278 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16280 case OPC_CMPGDU_LT_OB
:
16282 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16284 case OPC_CMPGDU_LE_OB
:
16286 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16288 case OPC_CMPGU_EQ_OB
:
16290 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16292 case OPC_CMPGU_LT_OB
:
16294 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16296 case OPC_CMPGU_LE_OB
:
16298 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16300 case OPC_CMPU_EQ_OB
:
16302 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16304 case OPC_CMPU_LT_OB
:
16306 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16308 case OPC_CMPU_LE_OB
:
16310 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16312 case OPC_PACKRL_PW
:
16314 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16318 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16322 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16326 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16334 tcg_temp_free(v1_t
);
16335 tcg_temp_free(v2_t
);
16338 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16339 uint32_t op1
, int rt
, int rs
, int sa
)
16346 /* Treat as NOP. */
16350 t0
= tcg_temp_new();
16351 gen_load_gpr(t0
, rs
);
16354 case OPC_APPEND_DSP
:
16355 switch (MASK_APPEND(ctx
->opcode
)) {
16358 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16360 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16364 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16365 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16366 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16367 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16369 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16373 if (sa
!= 0 && sa
!= 2) {
16374 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16375 tcg_gen_ext32u_tl(t0
, t0
);
16376 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16377 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16379 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16381 default: /* Invalid */
16382 MIPS_INVAL("MASK APPEND");
16383 generate_exception_end(ctx
, EXCP_RI
);
16387 #ifdef TARGET_MIPS64
16388 case OPC_DAPPEND_DSP
:
16389 switch (MASK_DAPPEND(ctx
->opcode
)) {
16392 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16396 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16397 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16398 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16402 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16403 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16404 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16409 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16410 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16411 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16412 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16415 default: /* Invalid */
16416 MIPS_INVAL("MASK DAPPEND");
16417 generate_exception_end(ctx
, EXCP_RI
);
16426 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16427 int ret
, int v1
, int v2
, int check_ret
)
16436 if ((ret
== 0) && (check_ret
== 1)) {
16437 /* Treat as NOP. */
16441 t0
= tcg_temp_new();
16442 t1
= tcg_temp_new();
16443 v1_t
= tcg_temp_new();
16444 v2_t
= tcg_temp_new();
16446 gen_load_gpr(v1_t
, v1
);
16447 gen_load_gpr(v2_t
, v2
);
16450 case OPC_EXTR_W_DSP
:
16454 tcg_gen_movi_tl(t0
, v2
);
16455 tcg_gen_movi_tl(t1
, v1
);
16456 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16459 tcg_gen_movi_tl(t0
, v2
);
16460 tcg_gen_movi_tl(t1
, v1
);
16461 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16463 case OPC_EXTR_RS_W
:
16464 tcg_gen_movi_tl(t0
, v2
);
16465 tcg_gen_movi_tl(t1
, v1
);
16466 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16469 tcg_gen_movi_tl(t0
, v2
);
16470 tcg_gen_movi_tl(t1
, v1
);
16471 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16473 case OPC_EXTRV_S_H
:
16474 tcg_gen_movi_tl(t0
, v2
);
16475 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16478 tcg_gen_movi_tl(t0
, v2
);
16479 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16481 case OPC_EXTRV_R_W
:
16482 tcg_gen_movi_tl(t0
, v2
);
16483 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16485 case OPC_EXTRV_RS_W
:
16486 tcg_gen_movi_tl(t0
, v2
);
16487 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16490 tcg_gen_movi_tl(t0
, v2
);
16491 tcg_gen_movi_tl(t1
, v1
);
16492 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16495 tcg_gen_movi_tl(t0
, v2
);
16496 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16499 tcg_gen_movi_tl(t0
, v2
);
16500 tcg_gen_movi_tl(t1
, v1
);
16501 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16504 tcg_gen_movi_tl(t0
, v2
);
16505 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16508 imm
= (ctx
->opcode
>> 20) & 0x3F;
16509 tcg_gen_movi_tl(t0
, ret
);
16510 tcg_gen_movi_tl(t1
, imm
);
16511 gen_helper_shilo(t0
, t1
, cpu_env
);
16514 tcg_gen_movi_tl(t0
, ret
);
16515 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16518 tcg_gen_movi_tl(t0
, ret
);
16519 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16522 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16523 tcg_gen_movi_tl(t0
, imm
);
16524 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16527 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16528 tcg_gen_movi_tl(t0
, imm
);
16529 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16533 #ifdef TARGET_MIPS64
16534 case OPC_DEXTR_W_DSP
:
16538 tcg_gen_movi_tl(t0
, ret
);
16539 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16543 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16544 int ac
= (ctx
->opcode
>> 11) & 0x03;
16545 tcg_gen_movi_tl(t0
, shift
);
16546 tcg_gen_movi_tl(t1
, ac
);
16547 gen_helper_dshilo(t0
, t1
, cpu_env
);
16552 int ac
= (ctx
->opcode
>> 11) & 0x03;
16553 tcg_gen_movi_tl(t0
, ac
);
16554 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16558 tcg_gen_movi_tl(t0
, v2
);
16559 tcg_gen_movi_tl(t1
, v1
);
16561 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16564 tcg_gen_movi_tl(t0
, v2
);
16565 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16568 tcg_gen_movi_tl(t0
, v2
);
16569 tcg_gen_movi_tl(t1
, v1
);
16570 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16573 tcg_gen_movi_tl(t0
, v2
);
16574 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16577 tcg_gen_movi_tl(t0
, v2
);
16578 tcg_gen_movi_tl(t1
, v1
);
16579 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16581 case OPC_DEXTR_R_L
:
16582 tcg_gen_movi_tl(t0
, v2
);
16583 tcg_gen_movi_tl(t1
, v1
);
16584 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16586 case OPC_DEXTR_RS_L
:
16587 tcg_gen_movi_tl(t0
, v2
);
16588 tcg_gen_movi_tl(t1
, v1
);
16589 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16592 tcg_gen_movi_tl(t0
, v2
);
16593 tcg_gen_movi_tl(t1
, v1
);
16594 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16596 case OPC_DEXTR_R_W
:
16597 tcg_gen_movi_tl(t0
, v2
);
16598 tcg_gen_movi_tl(t1
, v1
);
16599 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16601 case OPC_DEXTR_RS_W
:
16602 tcg_gen_movi_tl(t0
, v2
);
16603 tcg_gen_movi_tl(t1
, v1
);
16604 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16606 case OPC_DEXTR_S_H
:
16607 tcg_gen_movi_tl(t0
, v2
);
16608 tcg_gen_movi_tl(t1
, v1
);
16609 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16611 case OPC_DEXTRV_S_H
:
16612 tcg_gen_movi_tl(t0
, v2
);
16613 tcg_gen_movi_tl(t1
, v1
);
16614 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16617 tcg_gen_movi_tl(t0
, v2
);
16618 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16620 case OPC_DEXTRV_R_L
:
16621 tcg_gen_movi_tl(t0
, v2
);
16622 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16624 case OPC_DEXTRV_RS_L
:
16625 tcg_gen_movi_tl(t0
, v2
);
16626 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16629 tcg_gen_movi_tl(t0
, v2
);
16630 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16632 case OPC_DEXTRV_R_W
:
16633 tcg_gen_movi_tl(t0
, v2
);
16634 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16636 case OPC_DEXTRV_RS_W
:
16637 tcg_gen_movi_tl(t0
, v2
);
16638 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16647 tcg_temp_free(v1_t
);
16648 tcg_temp_free(v2_t
);
16651 /* End MIPSDSP functions. */
/*
 * decode_opc_special_r6: decode the Release-6 variants of the SPECIAL major
 * opcode (function field of ctx->opcode) and dispatch to the appropriate
 * gen_* translator.
 *
 * NOTE(review): this chunk is a damaged extraction — interior lines of the
 * function (the switch header, several case labels and break statements)
 * were dropped, as the embedded original line numbers show.  Only comments
 * are added here; the surviving code is left byte-identical.
 */
16653 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16655 int rs
, rt
, rd
, sa
;
/* Decode the standard register and shift-amount fields. */
16658 rs
= (ctx
->opcode
>> 21) & 0x1f;
16659 rt
= (ctx
->opcode
>> 16) & 0x1f;
16660 rd
= (ctx
->opcode
>> 11) & 0x1f;
16661 sa
= (ctx
->opcode
>> 6) & 0x1f;
16663 op1
= MASK_SPECIAL(ctx
->opcode
);
/* LSA: shift amount is the 2-bit field at bit 6. */
16666 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16668 case OPC_MULT
... OPC_DIVU
:
16669 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16679 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16682 MIPS_INVAL("special_r6 muldiv");
16683 generate_exception_end(ctx
, EXCP_RI
);
16689 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16693 if (rt
== 0 && sa
== 1) {
16694 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16695 We need additionally to check other fields */
16696 gen_cl(ctx
, op1
, rd
, rs
);
16698 generate_exception_end(ctx
, EXCP_RI
);
/* SDBBP: semihosting trap when the UHI code matches, else SBRI/debug. */
16702 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16703 gen_helper_do_semihosting(cpu_env
);
16705 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16706 generate_exception_end(ctx
, EXCP_RI
);
16708 generate_exception_end(ctx
, EXCP_DBp
);
/* 64-bit-only opcodes below. */
16712 #if defined(TARGET_MIPS64)
16714 check_mips_64(ctx
);
16715 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16719 if (rt
== 0 && sa
== 1) {
16720 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16721 We need additionally to check other fields */
16722 check_mips_64(ctx
);
16723 gen_cl(ctx
, op1
, rd
, rs
);
16725 generate_exception_end(ctx
, EXCP_RI
);
16728 case OPC_DMULT
... OPC_DDIVU
:
16729 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16739 check_mips_64(ctx
);
16740 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16743 MIPS_INVAL("special_r6 muldiv");
16744 generate_exception_end(ctx
, EXCP_RI
);
16749 default: /* Invalid */
16750 MIPS_INVAL("special_r6");
16751 generate_exception_end(ctx
, EXCP_RI
);
/*
 * decode_opc_special_legacy: decode pre-Release-6 variants of the SPECIAL
 * major opcode (conditional moves, HI/LO moves, MOVCI, VR54xx multiplies,
 * mul/div families, SPIM) and dispatch to the matching gen_* translator.
 *
 * NOTE(review): damaged extraction — the switch header, several case
 * labels and break statements were dropped (embedded original line numbers
 * jump).  Only comments are added; surviving code is byte-identical.
 */
16756 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
16758 int rs
, rt
, rd
, sa
;
/* Decode the standard register and shift-amount fields. */
16761 rs
= (ctx
->opcode
>> 21) & 0x1f;
16762 rt
= (ctx
->opcode
>> 16) & 0x1f;
16763 rd
= (ctx
->opcode
>> 11) & 0x1f;
16764 sa
= (ctx
->opcode
>> 6) & 0x1f;
16766 op1
= MASK_SPECIAL(ctx
->opcode
);
16768 case OPC_MOVN
: /* Conditional move */
16770 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
16771 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
16772 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16774 case OPC_MFHI
: /* Move from HI/LO */
16776 gen_HILO(ctx
, op1
, rs
& 3, rd
);
16779 case OPC_MTLO
: /* Move to HI/LO */
16780 gen_HILO(ctx
, op1
, rd
& 3, rs
);
/* MOVCI requires an FPU; otherwise raise a coprocessor-unusable fault. */
16783 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
16784 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16785 check_cp1_enabled(ctx
);
16786 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
16787 (ctx
->opcode
>> 16) & 1);
16789 generate_exception_err(ctx
, EXCP_CpU
, 1);
/* VR54xx vendor-specific multiply group. */
16795 check_insn(ctx
, INSN_VR54XX
);
16796 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
16797 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
16799 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
16804 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16806 #if defined(TARGET_MIPS64)
16807 case OPC_DMULT
... OPC_DDIVU
:
16808 check_insn(ctx
, ISA_MIPS3
);
16809 check_mips_64(ctx
);
16810 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
16814 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16817 #ifdef MIPS_STRICT_STANDARD
16818 MIPS_INVAL("SPIM");
16819 generate_exception_end(ctx
, EXCP_RI
);
16821 /* Implemented as RI exception for now. */
16822 MIPS_INVAL("spim (unofficial)");
16823 generate_exception_end(ctx
, EXCP_RI
);
16826 default: /* Invalid */
16827 MIPS_INVAL("special_legacy");
16828 generate_exception_end(ctx
, EXCP_RI
);
16833 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
16835 int rs
, rt
, rd
, sa
;
16838 rs
= (ctx
->opcode
>> 21) & 0x1f;
16839 rt
= (ctx
->opcode
>> 16) & 0x1f;
16840 rd
= (ctx
->opcode
>> 11) & 0x1f;
16841 sa
= (ctx
->opcode
>> 6) & 0x1f;
16843 op1
= MASK_SPECIAL(ctx
->opcode
);
16845 case OPC_SLL
: /* Shift with immediate */
16846 if (sa
== 5 && rd
== 0 &&
16847 rs
== 0 && rt
== 0) { /* PAUSE */
16848 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
16849 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16850 generate_exception_end(ctx
, EXCP_RI
);
16856 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16859 switch ((ctx
->opcode
>> 21) & 0x1f) {
16861 /* rotr is decoded as srl on non-R2 CPUs */
16862 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16867 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16870 generate_exception_end(ctx
, EXCP_RI
);
16874 case OPC_ADD
... OPC_SUBU
:
16875 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16877 case OPC_SLLV
: /* Shifts */
16879 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16882 switch ((ctx
->opcode
>> 6) & 0x1f) {
16884 /* rotrv is decoded as srlv on non-R2 CPUs */
16885 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16890 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16893 generate_exception_end(ctx
, EXCP_RI
);
16897 case OPC_SLT
: /* Set on less than */
16899 gen_slt(ctx
, op1
, rd
, rs
, rt
);
16901 case OPC_AND
: /* Logic*/
16905 gen_logic(ctx
, op1
, rd
, rs
, rt
);
16908 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
16910 case OPC_TGE
... OPC_TEQ
: /* Traps */
16912 check_insn(ctx
, ISA_MIPS2
);
16913 gen_trap(ctx
, op1
, rs
, rt
, -1);
16915 case OPC_LSA
: /* OPC_PMON */
16916 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
16917 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
16918 decode_opc_special_r6(env
, ctx
);
16920 /* Pmon entry point, also R4010 selsl */
16921 #ifdef MIPS_STRICT_STANDARD
16922 MIPS_INVAL("PMON / selsl");
16923 generate_exception_end(ctx
, EXCP_RI
);
16925 gen_helper_0e0i(pmon
, sa
);
16930 generate_exception_end(ctx
, EXCP_SYSCALL
);
16933 generate_exception_end(ctx
, EXCP_BREAK
);
16936 check_insn(ctx
, ISA_MIPS2
);
16937 /* Treat as NOP. */
16940 #if defined(TARGET_MIPS64)
16941 /* MIPS64 specific opcodes */
16946 check_insn(ctx
, ISA_MIPS3
);
16947 check_mips_64(ctx
);
16948 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16951 switch ((ctx
->opcode
>> 21) & 0x1f) {
16953 /* drotr is decoded as dsrl on non-R2 CPUs */
16954 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16959 check_insn(ctx
, ISA_MIPS3
);
16960 check_mips_64(ctx
);
16961 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16964 generate_exception_end(ctx
, EXCP_RI
);
16969 switch ((ctx
->opcode
>> 21) & 0x1f) {
16971 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
16972 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
16977 check_insn(ctx
, ISA_MIPS3
);
16978 check_mips_64(ctx
);
16979 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
16982 generate_exception_end(ctx
, EXCP_RI
);
16986 case OPC_DADD
... OPC_DSUBU
:
16987 check_insn(ctx
, ISA_MIPS3
);
16988 check_mips_64(ctx
);
16989 gen_arith(ctx
, op1
, rd
, rs
, rt
);
16993 check_insn(ctx
, ISA_MIPS3
);
16994 check_mips_64(ctx
);
16995 gen_shift(ctx
, op1
, rd
, rs
, rt
);
16998 switch ((ctx
->opcode
>> 6) & 0x1f) {
17000 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17001 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17006 check_insn(ctx
, ISA_MIPS3
);
17007 check_mips_64(ctx
);
17008 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17011 generate_exception_end(ctx
, EXCP_RI
);
17016 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17017 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17018 decode_opc_special_r6(env
, ctx
);
17023 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17024 decode_opc_special_r6(env
, ctx
);
17026 decode_opc_special_legacy(env
, ctx
);
/*
 * decode_opc_special2_legacy: decode the SPECIAL2 major opcode, removed in
 * Release 6 (hence the check_insn_opc_removed guard): MADD/MSUB families,
 * MUL, Loongson-2F integer ops, CLO/CLZ, SDBBP and the 64-bit variants.
 *
 * NOTE(review): damaged extraction — the switch header, several case
 * labels and break statements were dropped (embedded original line numbers
 * jump).  Only comments are added; surviving code is byte-identical.
 */
17031 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17036 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17038 rs
= (ctx
->opcode
>> 21) & 0x1f;
17039 rt
= (ctx
->opcode
>> 16) & 0x1f;
17040 rd
= (ctx
->opcode
>> 11) & 0x1f;
17042 op1
= MASK_SPECIAL2(ctx
->opcode
);
17044 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17045 case OPC_MSUB
... OPC_MSUBU
:
17046 check_insn(ctx
, ISA_MIPS32
);
17047 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17050 gen_arith(ctx
, op1
, rd
, rs
, rt
);
/* Loongson-2F vendor-specific integer group. */
17053 case OPC_DIVU_G_2F
:
17054 case OPC_MULT_G_2F
:
17055 case OPC_MULTU_G_2F
:
17057 case OPC_MODU_G_2F
:
17058 check_insn(ctx
, INSN_LOONGSON2F
);
17059 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17063 check_insn(ctx
, ISA_MIPS32
);
17064 gen_cl(ctx
, op1
, rd
, rs
);
/* SDBBP: semihosting trap when the UHI code matches, else debug bkpt. */
17067 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17068 gen_helper_do_semihosting(cpu_env
);
17070 /* XXX: not clear which exception should be raised
17071 * when in debug mode...
17073 check_insn(ctx
, ISA_MIPS32
);
17074 generate_exception_end(ctx
, EXCP_DBp
);
17077 #if defined(TARGET_MIPS64)
17080 check_insn(ctx
, ISA_MIPS64
);
17081 check_mips_64(ctx
);
17082 gen_cl(ctx
, op1
, rd
, rs
);
17084 case OPC_DMULT_G_2F
:
17085 case OPC_DMULTU_G_2F
:
17086 case OPC_DDIV_G_2F
:
17087 case OPC_DDIVU_G_2F
:
17088 case OPC_DMOD_G_2F
:
17089 case OPC_DMODU_G_2F
:
17090 check_insn(ctx
, INSN_LOONGSON2F
);
17091 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17094 default: /* Invalid */
17095 MIPS_INVAL("special2_legacy");
17096 generate_exception_end(ctx
, EXCP_RI
);
17101 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17103 int rs
, rt
, rd
, sa
;
17107 rs
= (ctx
->opcode
>> 21) & 0x1f;
17108 rt
= (ctx
->opcode
>> 16) & 0x1f;
17109 rd
= (ctx
->opcode
>> 11) & 0x1f;
17110 sa
= (ctx
->opcode
>> 6) & 0x1f;
17111 imm
= (int16_t)ctx
->opcode
>> 7;
17113 op1
= MASK_SPECIAL3(ctx
->opcode
);
17117 /* hint codes 24-31 are reserved and signal RI */
17118 generate_exception_end(ctx
, EXCP_RI
);
17120 /* Treat as NOP. */
17123 /* Treat as NOP. */
17126 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17129 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17134 /* Treat as NOP. */
17137 op2
= MASK_BSHFL(ctx
->opcode
);
17139 case OPC_ALIGN
... OPC_ALIGN_END
:
17140 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17143 gen_bitswap(ctx
, op2
, rd
, rt
);
17148 #if defined(TARGET_MIPS64)
17150 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17153 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17156 check_mips_64(ctx
);
17159 /* Treat as NOP. */
17162 op2
= MASK_DBSHFL(ctx
->opcode
);
17164 case OPC_DALIGN
... OPC_DALIGN_END
:
17165 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17168 gen_bitswap(ctx
, op2
, rd
, rt
);
17175 default: /* Invalid */
17176 MIPS_INVAL("special3_r6");
17177 generate_exception_end(ctx
, EXCP_RI
);
17182 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17187 rs
= (ctx
->opcode
>> 21) & 0x1f;
17188 rt
= (ctx
->opcode
>> 16) & 0x1f;
17189 rd
= (ctx
->opcode
>> 11) & 0x1f;
17191 op1
= MASK_SPECIAL3(ctx
->opcode
);
17193 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17194 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17195 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17196 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17197 * the same mask and op1. */
17198 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17199 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17202 case OPC_ADDUH_R_QB
:
17204 case OPC_ADDQH_R_PH
:
17206 case OPC_ADDQH_R_W
:
17208 case OPC_SUBUH_R_QB
:
17210 case OPC_SUBQH_R_PH
:
17212 case OPC_SUBQH_R_W
:
17213 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17218 case OPC_MULQ_RS_W
:
17219 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17222 MIPS_INVAL("MASK ADDUH.QB");
17223 generate_exception_end(ctx
, EXCP_RI
);
17226 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17227 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17229 generate_exception_end(ctx
, EXCP_RI
);
17233 op2
= MASK_LX(ctx
->opcode
);
17235 #if defined(TARGET_MIPS64)
17241 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17243 default: /* Invalid */
17244 MIPS_INVAL("MASK LX");
17245 generate_exception_end(ctx
, EXCP_RI
);
17249 case OPC_ABSQ_S_PH_DSP
:
17250 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17252 case OPC_ABSQ_S_QB
:
17253 case OPC_ABSQ_S_PH
:
17255 case OPC_PRECEQ_W_PHL
:
17256 case OPC_PRECEQ_W_PHR
:
17257 case OPC_PRECEQU_PH_QBL
:
17258 case OPC_PRECEQU_PH_QBR
:
17259 case OPC_PRECEQU_PH_QBLA
:
17260 case OPC_PRECEQU_PH_QBRA
:
17261 case OPC_PRECEU_PH_QBL
:
17262 case OPC_PRECEU_PH_QBR
:
17263 case OPC_PRECEU_PH_QBLA
:
17264 case OPC_PRECEU_PH_QBRA
:
17265 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17272 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17275 MIPS_INVAL("MASK ABSQ_S.PH");
17276 generate_exception_end(ctx
, EXCP_RI
);
17280 case OPC_ADDU_QB_DSP
:
17281 op2
= MASK_ADDU_QB(ctx
->opcode
);
17284 case OPC_ADDQ_S_PH
:
17287 case OPC_ADDU_S_QB
:
17289 case OPC_ADDU_S_PH
:
17291 case OPC_SUBQ_S_PH
:
17294 case OPC_SUBU_S_QB
:
17296 case OPC_SUBU_S_PH
:
17300 case OPC_RADDU_W_QB
:
17301 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17303 case OPC_MULEU_S_PH_QBL
:
17304 case OPC_MULEU_S_PH_QBR
:
17305 case OPC_MULQ_RS_PH
:
17306 case OPC_MULEQ_S_W_PHL
:
17307 case OPC_MULEQ_S_W_PHR
:
17308 case OPC_MULQ_S_PH
:
17309 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17311 default: /* Invalid */
17312 MIPS_INVAL("MASK ADDU.QB");
17313 generate_exception_end(ctx
, EXCP_RI
);
17318 case OPC_CMPU_EQ_QB_DSP
:
17319 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17321 case OPC_PRECR_SRA_PH_W
:
17322 case OPC_PRECR_SRA_R_PH_W
:
17323 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17325 case OPC_PRECR_QB_PH
:
17326 case OPC_PRECRQ_QB_PH
:
17327 case OPC_PRECRQ_PH_W
:
17328 case OPC_PRECRQ_RS_PH_W
:
17329 case OPC_PRECRQU_S_QB_PH
:
17330 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17332 case OPC_CMPU_EQ_QB
:
17333 case OPC_CMPU_LT_QB
:
17334 case OPC_CMPU_LE_QB
:
17335 case OPC_CMP_EQ_PH
:
17336 case OPC_CMP_LT_PH
:
17337 case OPC_CMP_LE_PH
:
17338 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17340 case OPC_CMPGU_EQ_QB
:
17341 case OPC_CMPGU_LT_QB
:
17342 case OPC_CMPGU_LE_QB
:
17343 case OPC_CMPGDU_EQ_QB
:
17344 case OPC_CMPGDU_LT_QB
:
17345 case OPC_CMPGDU_LE_QB
:
17348 case OPC_PACKRL_PH
:
17349 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17351 default: /* Invalid */
17352 MIPS_INVAL("MASK CMPU.EQ.QB");
17353 generate_exception_end(ctx
, EXCP_RI
);
17357 case OPC_SHLL_QB_DSP
:
17358 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17360 case OPC_DPA_W_PH_DSP
:
17361 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17363 case OPC_DPAU_H_QBL
:
17364 case OPC_DPAU_H_QBR
:
17365 case OPC_DPSU_H_QBL
:
17366 case OPC_DPSU_H_QBR
:
17368 case OPC_DPAX_W_PH
:
17369 case OPC_DPAQ_S_W_PH
:
17370 case OPC_DPAQX_S_W_PH
:
17371 case OPC_DPAQX_SA_W_PH
:
17373 case OPC_DPSX_W_PH
:
17374 case OPC_DPSQ_S_W_PH
:
17375 case OPC_DPSQX_S_W_PH
:
17376 case OPC_DPSQX_SA_W_PH
:
17377 case OPC_MULSAQ_S_W_PH
:
17378 case OPC_DPAQ_SA_L_W
:
17379 case OPC_DPSQ_SA_L_W
:
17380 case OPC_MAQ_S_W_PHL
:
17381 case OPC_MAQ_S_W_PHR
:
17382 case OPC_MAQ_SA_W_PHL
:
17383 case OPC_MAQ_SA_W_PHR
:
17384 case OPC_MULSA_W_PH
:
17385 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17387 default: /* Invalid */
17388 MIPS_INVAL("MASK DPAW.PH");
17389 generate_exception_end(ctx
, EXCP_RI
);
17394 op2
= MASK_INSV(ctx
->opcode
);
17405 t0
= tcg_temp_new();
17406 t1
= tcg_temp_new();
17408 gen_load_gpr(t0
, rt
);
17409 gen_load_gpr(t1
, rs
);
17411 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17417 default: /* Invalid */
17418 MIPS_INVAL("MASK INSV");
17419 generate_exception_end(ctx
, EXCP_RI
);
17423 case OPC_APPEND_DSP
:
17424 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17426 case OPC_EXTR_W_DSP
:
17427 op2
= MASK_EXTR_W(ctx
->opcode
);
17431 case OPC_EXTR_RS_W
:
17433 case OPC_EXTRV_S_H
:
17435 case OPC_EXTRV_R_W
:
17436 case OPC_EXTRV_RS_W
:
17441 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17444 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17450 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17452 default: /* Invalid */
17453 MIPS_INVAL("MASK EXTR.W");
17454 generate_exception_end(ctx
, EXCP_RI
);
17458 #if defined(TARGET_MIPS64)
17459 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17460 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17461 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17462 check_insn(ctx
, INSN_LOONGSON2E
);
17463 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17465 case OPC_ABSQ_S_QH_DSP
:
17466 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17468 case OPC_PRECEQ_L_PWL
:
17469 case OPC_PRECEQ_L_PWR
:
17470 case OPC_PRECEQ_PW_QHL
:
17471 case OPC_PRECEQ_PW_QHR
:
17472 case OPC_PRECEQ_PW_QHLA
:
17473 case OPC_PRECEQ_PW_QHRA
:
17474 case OPC_PRECEQU_QH_OBL
:
17475 case OPC_PRECEQU_QH_OBR
:
17476 case OPC_PRECEQU_QH_OBLA
:
17477 case OPC_PRECEQU_QH_OBRA
:
17478 case OPC_PRECEU_QH_OBL
:
17479 case OPC_PRECEU_QH_OBR
:
17480 case OPC_PRECEU_QH_OBLA
:
17481 case OPC_PRECEU_QH_OBRA
:
17482 case OPC_ABSQ_S_OB
:
17483 case OPC_ABSQ_S_PW
:
17484 case OPC_ABSQ_S_QH
:
17485 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17493 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17495 default: /* Invalid */
17496 MIPS_INVAL("MASK ABSQ_S.QH");
17497 generate_exception_end(ctx
, EXCP_RI
);
17501 case OPC_ADDU_OB_DSP
:
17502 op2
= MASK_ADDU_OB(ctx
->opcode
);
17504 case OPC_RADDU_L_OB
:
17506 case OPC_SUBQ_S_PW
:
17508 case OPC_SUBQ_S_QH
:
17510 case OPC_SUBU_S_OB
:
17512 case OPC_SUBU_S_QH
:
17514 case OPC_SUBUH_R_OB
:
17516 case OPC_ADDQ_S_PW
:
17518 case OPC_ADDQ_S_QH
:
17520 case OPC_ADDU_S_OB
:
17522 case OPC_ADDU_S_QH
:
17524 case OPC_ADDUH_R_OB
:
17525 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17527 case OPC_MULEQ_S_PW_QHL
:
17528 case OPC_MULEQ_S_PW_QHR
:
17529 case OPC_MULEU_S_QH_OBL
:
17530 case OPC_MULEU_S_QH_OBR
:
17531 case OPC_MULQ_RS_QH
:
17532 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17534 default: /* Invalid */
17535 MIPS_INVAL("MASK ADDU.OB");
17536 generate_exception_end(ctx
, EXCP_RI
);
17540 case OPC_CMPU_EQ_OB_DSP
:
17541 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17543 case OPC_PRECR_SRA_QH_PW
:
17544 case OPC_PRECR_SRA_R_QH_PW
:
17545 /* Return value is rt. */
17546 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17548 case OPC_PRECR_OB_QH
:
17549 case OPC_PRECRQ_OB_QH
:
17550 case OPC_PRECRQ_PW_L
:
17551 case OPC_PRECRQ_QH_PW
:
17552 case OPC_PRECRQ_RS_QH_PW
:
17553 case OPC_PRECRQU_S_OB_QH
:
17554 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17556 case OPC_CMPU_EQ_OB
:
17557 case OPC_CMPU_LT_OB
:
17558 case OPC_CMPU_LE_OB
:
17559 case OPC_CMP_EQ_QH
:
17560 case OPC_CMP_LT_QH
:
17561 case OPC_CMP_LE_QH
:
17562 case OPC_CMP_EQ_PW
:
17563 case OPC_CMP_LT_PW
:
17564 case OPC_CMP_LE_PW
:
17565 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17567 case OPC_CMPGDU_EQ_OB
:
17568 case OPC_CMPGDU_LT_OB
:
17569 case OPC_CMPGDU_LE_OB
:
17570 case OPC_CMPGU_EQ_OB
:
17571 case OPC_CMPGU_LT_OB
:
17572 case OPC_CMPGU_LE_OB
:
17573 case OPC_PACKRL_PW
:
17577 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17579 default: /* Invalid */
17580 MIPS_INVAL("MASK CMPU_EQ.OB");
17581 generate_exception_end(ctx
, EXCP_RI
);
17585 case OPC_DAPPEND_DSP
:
17586 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17588 case OPC_DEXTR_W_DSP
:
17589 op2
= MASK_DEXTR_W(ctx
->opcode
);
17596 case OPC_DEXTR_R_L
:
17597 case OPC_DEXTR_RS_L
:
17599 case OPC_DEXTR_R_W
:
17600 case OPC_DEXTR_RS_W
:
17601 case OPC_DEXTR_S_H
:
17603 case OPC_DEXTRV_R_L
:
17604 case OPC_DEXTRV_RS_L
:
17605 case OPC_DEXTRV_S_H
:
17607 case OPC_DEXTRV_R_W
:
17608 case OPC_DEXTRV_RS_W
:
17609 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17614 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17616 default: /* Invalid */
17617 MIPS_INVAL("MASK EXTR.W");
17618 generate_exception_end(ctx
, EXCP_RI
);
17622 case OPC_DPAQ_W_QH_DSP
:
17623 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17625 case OPC_DPAU_H_OBL
:
17626 case OPC_DPAU_H_OBR
:
17627 case OPC_DPSU_H_OBL
:
17628 case OPC_DPSU_H_OBR
:
17630 case OPC_DPAQ_S_W_QH
:
17632 case OPC_DPSQ_S_W_QH
:
17633 case OPC_MULSAQ_S_W_QH
:
17634 case OPC_DPAQ_SA_L_PW
:
17635 case OPC_DPSQ_SA_L_PW
:
17636 case OPC_MULSAQ_S_L_PW
:
17637 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17639 case OPC_MAQ_S_W_QHLL
:
17640 case OPC_MAQ_S_W_QHLR
:
17641 case OPC_MAQ_S_W_QHRL
:
17642 case OPC_MAQ_S_W_QHRR
:
17643 case OPC_MAQ_SA_W_QHLL
:
17644 case OPC_MAQ_SA_W_QHLR
:
17645 case OPC_MAQ_SA_W_QHRL
:
17646 case OPC_MAQ_SA_W_QHRR
:
17647 case OPC_MAQ_S_L_PWL
:
17648 case OPC_MAQ_S_L_PWR
:
17653 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17655 default: /* Invalid */
17656 MIPS_INVAL("MASK DPAQ.W.QH");
17657 generate_exception_end(ctx
, EXCP_RI
);
17661 case OPC_DINSV_DSP
:
17662 op2
= MASK_INSV(ctx
->opcode
);
17673 t0
= tcg_temp_new();
17674 t1
= tcg_temp_new();
17676 gen_load_gpr(t0
, rt
);
17677 gen_load_gpr(t1
, rs
);
17679 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17685 default: /* Invalid */
17686 MIPS_INVAL("MASK DINSV");
17687 generate_exception_end(ctx
, EXCP_RI
);
17691 case OPC_SHLL_OB_DSP
:
17692 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17695 default: /* Invalid */
17696 MIPS_INVAL("special3_legacy");
17697 generate_exception_end(ctx
, EXCP_RI
);
17702 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17704 int rs
, rt
, rd
, sa
;
17707 rs
= (ctx
->opcode
>> 21) & 0x1f;
17708 rt
= (ctx
->opcode
>> 16) & 0x1f;
17709 rd
= (ctx
->opcode
>> 11) & 0x1f;
17710 sa
= (ctx
->opcode
>> 6) & 0x1f;
17712 op1
= MASK_SPECIAL3(ctx
->opcode
);
17716 check_insn(ctx
, ISA_MIPS32R2
);
17717 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17720 op2
= MASK_BSHFL(ctx
->opcode
);
17722 case OPC_ALIGN
... OPC_ALIGN_END
:
17724 check_insn(ctx
, ISA_MIPS32R6
);
17725 decode_opc_special3_r6(env
, ctx
);
17728 check_insn(ctx
, ISA_MIPS32R2
);
17729 gen_bshfl(ctx
, op2
, rt
, rd
);
17733 #if defined(TARGET_MIPS64)
17734 case OPC_DEXTM
... OPC_DEXT
:
17735 case OPC_DINSM
... OPC_DINS
:
17736 check_insn(ctx
, ISA_MIPS64R2
);
17737 check_mips_64(ctx
);
17738 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17741 op2
= MASK_DBSHFL(ctx
->opcode
);
17743 case OPC_DALIGN
... OPC_DALIGN_END
:
17745 check_insn(ctx
, ISA_MIPS32R6
);
17746 decode_opc_special3_r6(env
, ctx
);
17749 check_insn(ctx
, ISA_MIPS64R2
);
17750 check_mips_64(ctx
);
17751 op2
= MASK_DBSHFL(ctx
->opcode
);
17752 gen_bshfl(ctx
, op2
, rt
, rd
);
17758 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
17761 check_insn(ctx
, ASE_MT
);
17763 TCGv t0
= tcg_temp_new();
17764 TCGv t1
= tcg_temp_new();
17766 gen_load_gpr(t0
, rt
);
17767 gen_load_gpr(t1
, rs
);
17768 gen_helper_fork(t0
, t1
);
17774 check_insn(ctx
, ASE_MT
);
17776 TCGv t0
= tcg_temp_new();
17778 gen_load_gpr(t0
, rs
);
17779 gen_helper_yield(t0
, cpu_env
, t0
);
17780 gen_store_gpr(t0
, rd
);
17785 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17786 decode_opc_special3_r6(env
, ctx
);
17788 decode_opc_special3_legacy(env
, ctx
);
17793 /* MIPS SIMD Architecture (MSA) */
17794 static inline int check_msa_access(DisasContext
*ctx
)
17796 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
17797 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
17798 generate_exception_end(ctx
, EXCP_RI
);
17802 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
17803 if (ctx
->insn_flags
& ASE_MSA
) {
17804 generate_exception_end(ctx
, EXCP_MSADIS
);
17807 generate_exception_end(ctx
, EXCP_RI
);
17814 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
17816 /* generates tcg ops to check if any element is 0 */
17817 /* Note this function only works with MSA_WRLEN = 128 */
17818 uint64_t eval_zero_or_big
= 0;
17819 uint64_t eval_big
= 0;
17820 TCGv_i64 t0
= tcg_temp_new_i64();
17821 TCGv_i64 t1
= tcg_temp_new_i64();
17824 eval_zero_or_big
= 0x0101010101010101ULL
;
17825 eval_big
= 0x8080808080808080ULL
;
17828 eval_zero_or_big
= 0x0001000100010001ULL
;
17829 eval_big
= 0x8000800080008000ULL
;
17832 eval_zero_or_big
= 0x0000000100000001ULL
;
17833 eval_big
= 0x8000000080000000ULL
;
17836 eval_zero_or_big
= 0x0000000000000001ULL
;
17837 eval_big
= 0x8000000000000000ULL
;
17840 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
17841 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
17842 tcg_gen_andi_i64(t0
, t0
, eval_big
);
17843 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
17844 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
17845 tcg_gen_andi_i64(t1
, t1
, eval_big
);
17846 tcg_gen_or_i64(t0
, t0
, t1
);
17847 /* if all bits are zero then all elements are not zero */
17848 /* if some bit is non-zero then some element is zero */
17849 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
17850 tcg_gen_trunc_i64_tl(tresult
, t0
);
17851 tcg_temp_free_i64(t0
);
17852 tcg_temp_free_i64(t1
);
17855 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
17857 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17858 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
17859 int64_t s16
= (int16_t)ctx
->opcode
;
17861 check_msa_access(ctx
);
17863 if (ctx
->insn_flags
& ISA_MIPS32R6
&& ctx
->hflags
& MIPS_HFLAG_BMASK
) {
17864 generate_exception_end(ctx
, EXCP_RI
);
17871 TCGv_i64 t0
= tcg_temp_new_i64();
17872 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
17873 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
17874 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
17875 tcg_gen_trunc_i64_tl(bcond
, t0
);
17876 tcg_temp_free_i64(t0
);
17883 gen_check_zero_element(bcond
, df
, wt
);
17889 gen_check_zero_element(bcond
, df
, wt
);
17890 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
17894 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
17896 ctx
->hflags
|= MIPS_HFLAG_BC
;
17897 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
17900 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
17902 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
17903 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
17904 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17905 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17907 TCGv_i32 twd
= tcg_const_i32(wd
);
17908 TCGv_i32 tws
= tcg_const_i32(ws
);
17909 TCGv_i32 ti8
= tcg_const_i32(i8
);
17911 switch (MASK_MSA_I8(ctx
->opcode
)) {
17913 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
17916 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
17919 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
17922 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
17925 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
17928 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
17931 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
17937 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
17938 if (df
== DF_DOUBLE
) {
17939 generate_exception_end(ctx
, EXCP_RI
);
17941 TCGv_i32 tdf
= tcg_const_i32(df
);
17942 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
17943 tcg_temp_free_i32(tdf
);
17948 MIPS_INVAL("MSA instruction");
17949 generate_exception_end(ctx
, EXCP_RI
);
17953 tcg_temp_free_i32(twd
);
17954 tcg_temp_free_i32(tws
);
17955 tcg_temp_free_i32(ti8
);
17958 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
17960 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
17961 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
17962 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
17963 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
17964 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
17965 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
17967 TCGv_i32 tdf
= tcg_const_i32(df
);
17968 TCGv_i32 twd
= tcg_const_i32(wd
);
17969 TCGv_i32 tws
= tcg_const_i32(ws
);
17970 TCGv_i32 timm
= tcg_temp_new_i32();
17971 tcg_gen_movi_i32(timm
, u5
);
17973 switch (MASK_MSA_I5(ctx
->opcode
)) {
17975 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17978 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17980 case OPC_MAXI_S_df
:
17981 tcg_gen_movi_i32(timm
, s5
);
17982 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17984 case OPC_MAXI_U_df
:
17985 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17987 case OPC_MINI_S_df
:
17988 tcg_gen_movi_i32(timm
, s5
);
17989 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
17991 case OPC_MINI_U_df
:
17992 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
17995 tcg_gen_movi_i32(timm
, s5
);
17996 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
17998 case OPC_CLTI_S_df
:
17999 tcg_gen_movi_i32(timm
, s5
);
18000 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18002 case OPC_CLTI_U_df
:
18003 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18005 case OPC_CLEI_S_df
:
18006 tcg_gen_movi_i32(timm
, s5
);
18007 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18009 case OPC_CLEI_U_df
:
18010 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18014 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18015 tcg_gen_movi_i32(timm
, s10
);
18016 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18020 MIPS_INVAL("MSA instruction");
18021 generate_exception_end(ctx
, EXCP_RI
);
18025 tcg_temp_free_i32(tdf
);
18026 tcg_temp_free_i32(twd
);
18027 tcg_temp_free_i32(tws
);
18028 tcg_temp_free_i32(timm
);
18031 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18033 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18034 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18035 uint32_t df
= 0, m
= 0;
18036 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18037 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18044 if ((dfm
& 0x40) == 0x00) {
18047 } else if ((dfm
& 0x60) == 0x40) {
18050 } else if ((dfm
& 0x70) == 0x60) {
18053 } else if ((dfm
& 0x78) == 0x70) {
18057 generate_exception_end(ctx
, EXCP_RI
);
18061 tdf
= tcg_const_i32(df
);
18062 tm
= tcg_const_i32(m
);
18063 twd
= tcg_const_i32(wd
);
18064 tws
= tcg_const_i32(ws
);
18066 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18068 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18071 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18074 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18077 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18080 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18083 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18085 case OPC_BINSLI_df
:
18086 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18088 case OPC_BINSRI_df
:
18089 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18092 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18095 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18098 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18101 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18104 MIPS_INVAL("MSA instruction");
18105 generate_exception_end(ctx
, EXCP_RI
);
18109 tcg_temp_free_i32(tdf
);
18110 tcg_temp_free_i32(tm
);
18111 tcg_temp_free_i32(twd
);
18112 tcg_temp_free_i32(tws
);
18115 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18117 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18118 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18119 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18120 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18121 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18123 TCGv_i32 tdf
= tcg_const_i32(df
);
18124 TCGv_i32 twd
= tcg_const_i32(wd
);
18125 TCGv_i32 tws
= tcg_const_i32(ws
);
18126 TCGv_i32 twt
= tcg_const_i32(wt
);
18128 switch (MASK_MSA_3R(ctx
->opcode
)) {
18130 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18133 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18136 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18139 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18141 case OPC_SUBS_S_df
:
18142 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18145 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18148 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18151 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18154 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18157 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18159 case OPC_ADDS_A_df
:
18160 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18162 case OPC_SUBS_U_df
:
18163 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18166 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18169 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18172 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18175 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18178 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18181 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18183 case OPC_ADDS_S_df
:
18184 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18186 case OPC_SUBSUS_U_df
:
18187 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18190 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18193 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18196 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18199 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18202 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18205 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18207 case OPC_ADDS_U_df
:
18208 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18210 case OPC_SUBSUU_S_df
:
18211 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18214 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18217 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18220 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18223 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18226 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18228 case OPC_ASUB_S_df
:
18229 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18232 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18235 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18238 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18241 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18244 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18247 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18249 case OPC_ASUB_U_df
:
18250 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18253 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18256 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18259 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18262 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18264 case OPC_AVER_S_df
:
18265 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18268 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18271 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18274 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18277 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18279 case OPC_AVER_U_df
:
18280 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18283 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18286 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18289 case OPC_DOTP_S_df
:
18290 case OPC_DOTP_U_df
:
18291 case OPC_DPADD_S_df
:
18292 case OPC_DPADD_U_df
:
18293 case OPC_DPSUB_S_df
:
18294 case OPC_HADD_S_df
:
18295 case OPC_DPSUB_U_df
:
18296 case OPC_HADD_U_df
:
18297 case OPC_HSUB_S_df
:
18298 case OPC_HSUB_U_df
:
18299 if (df
== DF_BYTE
) {
18300 generate_exception_end(ctx
, EXCP_RI
);
18303 switch (MASK_MSA_3R(ctx
->opcode
)) {
18304 case OPC_DOTP_S_df
:
18305 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18307 case OPC_DOTP_U_df
:
18308 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18310 case OPC_DPADD_S_df
:
18311 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18313 case OPC_DPADD_U_df
:
18314 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18316 case OPC_DPSUB_S_df
:
18317 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18319 case OPC_HADD_S_df
:
18320 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18322 case OPC_DPSUB_U_df
:
18323 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18325 case OPC_HADD_U_df
:
18326 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18328 case OPC_HSUB_S_df
:
18329 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18331 case OPC_HSUB_U_df
:
18332 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18337 MIPS_INVAL("MSA instruction");
18338 generate_exception_end(ctx
, EXCP_RI
);
18341 tcg_temp_free_i32(twd
);
18342 tcg_temp_free_i32(tws
);
18343 tcg_temp_free_i32(twt
);
18344 tcg_temp_free_i32(tdf
);
18347 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18349 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18350 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18351 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18352 TCGv telm
= tcg_temp_new();
18353 TCGv_i32 tsr
= tcg_const_i32(source
);
18354 TCGv_i32 tdt
= tcg_const_i32(dest
);
18356 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18358 gen_load_gpr(telm
, source
);
18359 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18362 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18363 gen_store_gpr(telm
, dest
);
18366 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18369 MIPS_INVAL("MSA instruction");
18370 generate_exception_end(ctx
, EXCP_RI
);
18374 tcg_temp_free(telm
);
18375 tcg_temp_free_i32(tdt
);
18376 tcg_temp_free_i32(tsr
);
18379 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18382 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18383 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18384 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18386 TCGv_i32 tws
= tcg_const_i32(ws
);
18387 TCGv_i32 twd
= tcg_const_i32(wd
);
18388 TCGv_i32 tn
= tcg_const_i32(n
);
18389 TCGv_i32 tdf
= tcg_const_i32(df
);
18391 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18393 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18395 case OPC_SPLATI_df
:
18396 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18399 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18401 case OPC_COPY_S_df
:
18402 case OPC_COPY_U_df
:
18403 case OPC_INSERT_df
:
18404 #if !defined(TARGET_MIPS64)
18405 /* Double format valid only for MIPS64 */
18406 if (df
== DF_DOUBLE
) {
18407 generate_exception_end(ctx
, EXCP_RI
);
18411 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18412 case OPC_COPY_S_df
:
18413 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18415 case OPC_COPY_U_df
:
18416 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18418 case OPC_INSERT_df
:
18419 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18424 MIPS_INVAL("MSA instruction");
18425 generate_exception_end(ctx
, EXCP_RI
);
18427 tcg_temp_free_i32(twd
);
18428 tcg_temp_free_i32(tws
);
18429 tcg_temp_free_i32(tn
);
18430 tcg_temp_free_i32(tdf
);
18433 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18435 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18436 uint32_t df
= 0, n
= 0;
18438 if ((dfn
& 0x30) == 0x00) {
18441 } else if ((dfn
& 0x38) == 0x20) {
18444 } else if ((dfn
& 0x3c) == 0x30) {
18447 } else if ((dfn
& 0x3e) == 0x38) {
18450 } else if (dfn
== 0x3E) {
18451 /* CTCMSA, CFCMSA, MOVE.V */
18452 gen_msa_elm_3e(env
, ctx
);
18455 generate_exception_end(ctx
, EXCP_RI
);
18459 gen_msa_elm_df(env
, ctx
, df
, n
);
18462 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18464 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18465 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18466 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18467 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18468 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18470 TCGv_i32 twd
= tcg_const_i32(wd
);
18471 TCGv_i32 tws
= tcg_const_i32(ws
);
18472 TCGv_i32 twt
= tcg_const_i32(wt
);
18473 TCGv_i32 tdf
= tcg_temp_new_i32();
18475 /* adjust df value for floating-point instruction */
18476 tcg_gen_movi_i32(tdf
, df
+ 2);
18478 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18480 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18483 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18486 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18489 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18492 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18495 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18498 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18501 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18504 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18507 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18510 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18513 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18516 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18519 tcg_gen_movi_i32(tdf
, df
+ 1);
18520 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18523 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18526 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18528 case OPC_MADD_Q_df
:
18529 tcg_gen_movi_i32(tdf
, df
+ 1);
18530 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18533 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18535 case OPC_MSUB_Q_df
:
18536 tcg_gen_movi_i32(tdf
, df
+ 1);
18537 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18540 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18543 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18546 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18549 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18552 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18555 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18558 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18561 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18564 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18567 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18570 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18573 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18576 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18578 case OPC_MULR_Q_df
:
18579 tcg_gen_movi_i32(tdf
, df
+ 1);
18580 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18583 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18585 case OPC_FMIN_A_df
:
18586 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18588 case OPC_MADDR_Q_df
:
18589 tcg_gen_movi_i32(tdf
, df
+ 1);
18590 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18593 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18596 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18598 case OPC_MSUBR_Q_df
:
18599 tcg_gen_movi_i32(tdf
, df
+ 1);
18600 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18603 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18605 case OPC_FMAX_A_df
:
18606 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18609 MIPS_INVAL("MSA instruction");
18610 generate_exception_end(ctx
, EXCP_RI
);
18614 tcg_temp_free_i32(twd
);
18615 tcg_temp_free_i32(tws
);
18616 tcg_temp_free_i32(twt
);
18617 tcg_temp_free_i32(tdf
);
18620 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18622 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18623 (op & (0x7 << 18)))
18624 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18625 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18626 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18627 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18628 TCGv_i32 twd
= tcg_const_i32(wd
);
18629 TCGv_i32 tws
= tcg_const_i32(ws
);
18630 TCGv_i32 twt
= tcg_const_i32(wt
);
18631 TCGv_i32 tdf
= tcg_const_i32(df
);
18633 switch (MASK_MSA_2R(ctx
->opcode
)) {
18635 #if !defined(TARGET_MIPS64)
18636 /* Double format valid only for MIPS64 */
18637 if (df
== DF_DOUBLE
) {
18638 generate_exception_end(ctx
, EXCP_RI
);
18642 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18645 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18648 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18651 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18654 MIPS_INVAL("MSA instruction");
18655 generate_exception_end(ctx
, EXCP_RI
);
18659 tcg_temp_free_i32(twd
);
18660 tcg_temp_free_i32(tws
);
18661 tcg_temp_free_i32(twt
);
18662 tcg_temp_free_i32(tdf
);
18665 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18667 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18668 (op & (0xf << 17)))
18669 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18670 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18671 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18672 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18673 TCGv_i32 twd
= tcg_const_i32(wd
);
18674 TCGv_i32 tws
= tcg_const_i32(ws
);
18675 TCGv_i32 twt
= tcg_const_i32(wt
);
18676 /* adjust df value for floating-point instruction */
18677 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18679 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18680 case OPC_FCLASS_df
:
18681 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18683 case OPC_FTRUNC_S_df
:
18684 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18686 case OPC_FTRUNC_U_df
:
18687 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18690 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18692 case OPC_FRSQRT_df
:
18693 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18696 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18699 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18702 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18704 case OPC_FEXUPL_df
:
18705 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18707 case OPC_FEXUPR_df
:
18708 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18711 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18714 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18716 case OPC_FTINT_S_df
:
18717 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18719 case OPC_FTINT_U_df
:
18720 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18722 case OPC_FFINT_S_df
:
18723 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18725 case OPC_FFINT_U_df
:
18726 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
18730 tcg_temp_free_i32(twd
);
18731 tcg_temp_free_i32(tws
);
18732 tcg_temp_free_i32(twt
);
18733 tcg_temp_free_i32(tdf
);
18736 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
18738 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
18739 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18740 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18741 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18742 TCGv_i32 twd
= tcg_const_i32(wd
);
18743 TCGv_i32 tws
= tcg_const_i32(ws
);
18744 TCGv_i32 twt
= tcg_const_i32(wt
);
18746 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18748 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
18751 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
18754 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
18757 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
18760 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
18763 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
18766 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
18769 MIPS_INVAL("MSA instruction");
18770 generate_exception_end(ctx
, EXCP_RI
);
18774 tcg_temp_free_i32(twd
);
18775 tcg_temp_free_i32(tws
);
18776 tcg_temp_free_i32(twt
);
18779 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
18781 switch (MASK_MSA_VEC(ctx
->opcode
)) {
18789 gen_msa_vec_v(env
, ctx
);
18792 gen_msa_2r(env
, ctx
);
18795 gen_msa_2rf(env
, ctx
);
18798 MIPS_INVAL("MSA instruction");
18799 generate_exception_end(ctx
, EXCP_RI
);
18804 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
18806 uint32_t opcode
= ctx
->opcode
;
18807 check_insn(ctx
, ASE_MSA
);
18808 check_msa_access(ctx
);
18810 switch (MASK_MSA_MINOR(opcode
)) {
18811 case OPC_MSA_I8_00
:
18812 case OPC_MSA_I8_01
:
18813 case OPC_MSA_I8_02
:
18814 gen_msa_i8(env
, ctx
);
18816 case OPC_MSA_I5_06
:
18817 case OPC_MSA_I5_07
:
18818 gen_msa_i5(env
, ctx
);
18820 case OPC_MSA_BIT_09
:
18821 case OPC_MSA_BIT_0A
:
18822 gen_msa_bit(env
, ctx
);
18824 case OPC_MSA_3R_0D
:
18825 case OPC_MSA_3R_0E
:
18826 case OPC_MSA_3R_0F
:
18827 case OPC_MSA_3R_10
:
18828 case OPC_MSA_3R_11
:
18829 case OPC_MSA_3R_12
:
18830 case OPC_MSA_3R_13
:
18831 case OPC_MSA_3R_14
:
18832 case OPC_MSA_3R_15
:
18833 gen_msa_3r(env
, ctx
);
18836 gen_msa_elm(env
, ctx
);
18838 case OPC_MSA_3RF_1A
:
18839 case OPC_MSA_3RF_1B
:
18840 case OPC_MSA_3RF_1C
:
18841 gen_msa_3rf(env
, ctx
);
18844 gen_msa_vec(env
, ctx
);
18855 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
18856 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
18857 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18858 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
18860 TCGv_i32 twd
= tcg_const_i32(wd
);
18861 TCGv taddr
= tcg_temp_new();
18862 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
18864 switch (MASK_MSA_MINOR(opcode
)) {
18866 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
18869 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
18872 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
18875 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
18878 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
18881 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
18884 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
18887 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
18891 tcg_temp_free_i32(twd
);
18892 tcg_temp_free(taddr
);
18896 MIPS_INVAL("MSA instruction");
18897 generate_exception_end(ctx
, EXCP_RI
);
18903 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
18906 int rs
, rt
, rd
, sa
;
18910 /* make sure instructions are on a word boundary */
18911 if (ctx
->pc
& 0x3) {
18912 env
->CP0_BadVAddr
= ctx
->pc
;
18913 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
18917 /* Handle blikely not taken case */
18918 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
18919 TCGLabel
*l1
= gen_new_label();
18921 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
18922 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
18923 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
18927 op
= MASK_OP_MAJOR(ctx
->opcode
);
18928 rs
= (ctx
->opcode
>> 21) & 0x1f;
18929 rt
= (ctx
->opcode
>> 16) & 0x1f;
18930 rd
= (ctx
->opcode
>> 11) & 0x1f;
18931 sa
= (ctx
->opcode
>> 6) & 0x1f;
18932 imm
= (int16_t)ctx
->opcode
;
18935 decode_opc_special(env
, ctx
);
18938 decode_opc_special2_legacy(env
, ctx
);
18941 decode_opc_special3(env
, ctx
);
18944 op1
= MASK_REGIMM(ctx
->opcode
);
18946 case OPC_BLTZL
: /* REGIMM branches */
18950 check_insn(ctx
, ISA_MIPS2
);
18951 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18955 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18959 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18961 /* OPC_NAL, OPC_BAL */
18962 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
18964 generate_exception_end(ctx
, EXCP_RI
);
18967 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
18970 case OPC_TGEI
... OPC_TEQI
: /* REGIMM traps */
18972 check_insn(ctx
, ISA_MIPS2
);
18973 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
18974 gen_trap(ctx
, op1
, rs
, -1, imm
);
18977 check_insn(ctx
, ISA_MIPS32R2
);
18978 /* Break the TB to be able to sync copied instructions
18980 ctx
->bstate
= BS_STOP
;
18982 case OPC_BPOSGE32
: /* MIPS DSP branch */
18983 #if defined(TARGET_MIPS64)
18987 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
18989 #if defined(TARGET_MIPS64)
18991 check_insn(ctx
, ISA_MIPS32R6
);
18992 check_mips_64(ctx
);
18994 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
18998 check_insn(ctx
, ISA_MIPS32R6
);
18999 check_mips_64(ctx
);
19001 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
19005 default: /* Invalid */
19006 MIPS_INVAL("regimm");
19007 generate_exception_end(ctx
, EXCP_RI
);
19012 check_cp0_enabled(ctx
);
19013 op1
= MASK_CP0(ctx
->opcode
);
19021 #if defined(TARGET_MIPS64)
19025 #ifndef CONFIG_USER_ONLY
19026 gen_cp0(env
, ctx
, op1
, rt
, rd
);
19027 #endif /* !CONFIG_USER_ONLY */
19029 case OPC_C0_FIRST
... OPC_C0_LAST
:
19030 #ifndef CONFIG_USER_ONLY
19031 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
19032 #endif /* !CONFIG_USER_ONLY */
19035 #ifndef CONFIG_USER_ONLY
19038 TCGv t0
= tcg_temp_new();
19040 op2
= MASK_MFMC0(ctx
->opcode
);
19043 check_insn(ctx
, ASE_MT
);
19044 gen_helper_dmt(t0
);
19045 gen_store_gpr(t0
, rt
);
19048 check_insn(ctx
, ASE_MT
);
19049 gen_helper_emt(t0
);
19050 gen_store_gpr(t0
, rt
);
19053 check_insn(ctx
, ASE_MT
);
19054 gen_helper_dvpe(t0
, cpu_env
);
19055 gen_store_gpr(t0
, rt
);
19058 check_insn(ctx
, ASE_MT
);
19059 gen_helper_evpe(t0
, cpu_env
);
19060 gen_store_gpr(t0
, rt
);
19063 check_insn(ctx
, ISA_MIPS32R2
);
19064 save_cpu_state(ctx
, 1);
19065 gen_helper_di(t0
, cpu_env
);
19066 gen_store_gpr(t0
, rt
);
19067 /* Stop translation as we may have switched
19068 the execution mode. */
19069 ctx
->bstate
= BS_STOP
;
19072 check_insn(ctx
, ISA_MIPS32R2
);
19073 save_cpu_state(ctx
, 1);
19074 gen_helper_ei(t0
, cpu_env
);
19075 gen_store_gpr(t0
, rt
);
19076 /* Stop translation as we may have switched
19077 the execution mode. */
19078 ctx
->bstate
= BS_STOP
;
19080 default: /* Invalid */
19081 MIPS_INVAL("mfmc0");
19082 generate_exception_end(ctx
, EXCP_RI
);
19087 #endif /* !CONFIG_USER_ONLY */
19090 check_insn(ctx
, ISA_MIPS32R2
);
19091 gen_load_srsgpr(rt
, rd
);
19094 check_insn(ctx
, ISA_MIPS32R2
);
19095 gen_store_srsgpr(rt
, rd
);
19099 generate_exception_end(ctx
, EXCP_RI
);
19103 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
19104 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19105 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
19106 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19109 /* Arithmetic with immediate opcode */
19110 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19114 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19116 case OPC_SLTI
: /* Set on less than with immediate opcode */
19118 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
19120 case OPC_ANDI
: /* Arithmetic with immediate opcode */
19121 case OPC_LUI
: /* OPC_AUI */
19124 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
19126 case OPC_J
... OPC_JAL
: /* Jump */
19127 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19128 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19131 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
19132 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19134 generate_exception_end(ctx
, EXCP_RI
);
19137 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
19138 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19141 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19144 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
19145 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19147 generate_exception_end(ctx
, EXCP_RI
);
19150 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
19151 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19154 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19157 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
19160 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19162 check_insn(ctx
, ISA_MIPS32R6
);
19163 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
19164 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19167 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
19170 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19172 check_insn(ctx
, ISA_MIPS32R6
);
19173 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
19174 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19179 check_insn(ctx
, ISA_MIPS2
);
19180 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19184 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
19186 case OPC_LL
: /* Load and stores */
19187 check_insn(ctx
, ISA_MIPS2
);
19191 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19193 case OPC_LB
... OPC_LH
:
19194 case OPC_LW
... OPC_LHU
:
19195 gen_ld(ctx
, op
, rt
, rs
, imm
);
19199 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19201 case OPC_SB
... OPC_SH
:
19203 gen_st(ctx
, op
, rt
, rs
, imm
);
19206 check_insn(ctx
, ISA_MIPS2
);
19207 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19208 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19211 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19212 check_cp0_enabled(ctx
);
19213 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
19214 /* Treat as NOP. */
19217 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19218 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
19219 /* Treat as NOP. */
19222 /* Floating point (COP1). */
19227 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
19231 op1
= MASK_CP1(ctx
->opcode
);
19236 check_cp1_enabled(ctx
);
19237 check_insn(ctx
, ISA_MIPS32R2
);
19242 check_cp1_enabled(ctx
);
19243 gen_cp1(ctx
, op1
, rt
, rd
);
19245 #if defined(TARGET_MIPS64)
19248 check_cp1_enabled(ctx
);
19249 check_insn(ctx
, ISA_MIPS3
);
19250 check_mips_64(ctx
);
19251 gen_cp1(ctx
, op1
, rt
, rd
);
19254 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
19255 check_cp1_enabled(ctx
);
19256 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19258 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19263 check_insn(ctx
, ASE_MIPS3D
);
19264 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19265 (rt
>> 2) & 0x7, imm
<< 2);
19269 check_cp1_enabled(ctx
);
19270 check_insn(ctx
, ISA_MIPS32R6
);
19271 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
19275 check_cp1_enabled(ctx
);
19276 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19278 check_insn(ctx
, ASE_MIPS3D
);
19281 check_cp1_enabled(ctx
);
19282 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19283 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
19284 (rt
>> 2) & 0x7, imm
<< 2);
19291 check_cp1_enabled(ctx
);
19292 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19298 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
19299 check_cp1_enabled(ctx
);
19300 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19302 case R6_OPC_CMP_AF_S
:
19303 case R6_OPC_CMP_UN_S
:
19304 case R6_OPC_CMP_EQ_S
:
19305 case R6_OPC_CMP_UEQ_S
:
19306 case R6_OPC_CMP_LT_S
:
19307 case R6_OPC_CMP_ULT_S
:
19308 case R6_OPC_CMP_LE_S
:
19309 case R6_OPC_CMP_ULE_S
:
19310 case R6_OPC_CMP_SAF_S
:
19311 case R6_OPC_CMP_SUN_S
:
19312 case R6_OPC_CMP_SEQ_S
:
19313 case R6_OPC_CMP_SEUQ_S
:
19314 case R6_OPC_CMP_SLT_S
:
19315 case R6_OPC_CMP_SULT_S
:
19316 case R6_OPC_CMP_SLE_S
:
19317 case R6_OPC_CMP_SULE_S
:
19318 case R6_OPC_CMP_OR_S
:
19319 case R6_OPC_CMP_UNE_S
:
19320 case R6_OPC_CMP_NE_S
:
19321 case R6_OPC_CMP_SOR_S
:
19322 case R6_OPC_CMP_SUNE_S
:
19323 case R6_OPC_CMP_SNE_S
:
19324 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19326 case R6_OPC_CMP_AF_D
:
19327 case R6_OPC_CMP_UN_D
:
19328 case R6_OPC_CMP_EQ_D
:
19329 case R6_OPC_CMP_UEQ_D
:
19330 case R6_OPC_CMP_LT_D
:
19331 case R6_OPC_CMP_ULT_D
:
19332 case R6_OPC_CMP_LE_D
:
19333 case R6_OPC_CMP_ULE_D
:
19334 case R6_OPC_CMP_SAF_D
:
19335 case R6_OPC_CMP_SUN_D
:
19336 case R6_OPC_CMP_SEQ_D
:
19337 case R6_OPC_CMP_SEUQ_D
:
19338 case R6_OPC_CMP_SLT_D
:
19339 case R6_OPC_CMP_SULT_D
:
19340 case R6_OPC_CMP_SLE_D
:
19341 case R6_OPC_CMP_SULE_D
:
19342 case R6_OPC_CMP_OR_D
:
19343 case R6_OPC_CMP_UNE_D
:
19344 case R6_OPC_CMP_NE_D
:
19345 case R6_OPC_CMP_SOR_D
:
19346 case R6_OPC_CMP_SUNE_D
:
19347 case R6_OPC_CMP_SNE_D
:
19348 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
19351 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
19352 rt
, rd
, sa
, (imm
>> 8) & 0x7);
19357 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
19372 check_insn(ctx
, ASE_MSA
);
19373 gen_msa_branch(env
, ctx
, op1
);
19377 generate_exception_end(ctx
, EXCP_RI
);
19382 /* Compact branches [R6] and COP2 [non-R6] */
19383 case OPC_BC
: /* OPC_LWC2 */
19384 case OPC_BALC
: /* OPC_SWC2 */
19385 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19386 /* OPC_BC, OPC_BALC */
19387 gen_compute_compact_branch(ctx
, op
, 0, 0,
19388 sextract32(ctx
->opcode
<< 2, 0, 28));
19390 /* OPC_LWC2, OPC_SWC2 */
19391 /* COP2: Not implemented. */
19392 generate_exception_err(ctx
, EXCP_CpU
, 2);
19395 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
19396 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
19397 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19399 /* OPC_BEQZC, OPC_BNEZC */
19400 gen_compute_compact_branch(ctx
, op
, rs
, 0,
19401 sextract32(ctx
->opcode
<< 2, 0, 23));
19403 /* OPC_JIC, OPC_JIALC */
19404 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
19407 /* OPC_LWC2, OPC_SWC2 */
19408 /* COP2: Not implemented. */
19409 generate_exception_err(ctx
, EXCP_CpU
, 2);
19413 check_insn(ctx
, INSN_LOONGSON2F
);
19414 /* Note that these instructions use different fields. */
19415 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
19419 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19420 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
19421 check_cp1_enabled(ctx
);
19422 op1
= MASK_CP3(ctx
->opcode
);
19426 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19432 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19433 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
19436 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19437 /* Treat as NOP. */
19440 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS32R2
);
19454 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32R2
);
19455 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
19459 generate_exception_end(ctx
, EXCP_RI
);
19463 generate_exception_err(ctx
, EXCP_CpU
, 1);
19467 #if defined(TARGET_MIPS64)
19468 /* MIPS64 opcodes */
19469 case OPC_LDL
... OPC_LDR
:
19471 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19475 check_insn(ctx
, ISA_MIPS3
);
19476 check_mips_64(ctx
);
19477 gen_ld(ctx
, op
, rt
, rs
, imm
);
19479 case OPC_SDL
... OPC_SDR
:
19480 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19483 check_insn(ctx
, ISA_MIPS3
);
19484 check_mips_64(ctx
);
19485 gen_st(ctx
, op
, rt
, rs
, imm
);
19488 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
19489 check_insn(ctx
, ISA_MIPS3
);
19490 check_mips_64(ctx
);
19491 gen_st_cond(ctx
, op
, rt
, rs
, imm
);
19493 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
19494 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19495 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
19496 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19499 check_insn(ctx
, ISA_MIPS3
);
19500 check_mips_64(ctx
);
19501 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19505 check_insn(ctx
, ISA_MIPS3
);
19506 check_mips_64(ctx
);
19507 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
19510 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
19511 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19512 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
19514 MIPS_INVAL("major opcode");
19515 generate_exception_end(ctx
, EXCP_RI
);
19519 case OPC_DAUI
: /* OPC_JALX */
19520 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
19521 #if defined(TARGET_MIPS64)
19523 check_mips_64(ctx
);
19525 generate_exception(ctx
, EXCP_RI
);
19526 } else if (rt
!= 0) {
19527 TCGv t0
= tcg_temp_new();
19528 gen_load_gpr(t0
, rs
);
19529 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
19533 generate_exception_end(ctx
, EXCP_RI
);
19534 MIPS_INVAL("major opcode");
19538 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
19539 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
19540 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
19543 case OPC_MSA
: /* OPC_MDMX */
19544 /* MDMX: Not implemented. */
19548 check_insn(ctx
, ISA_MIPS32R6
);
19549 gen_pcrel(ctx
, ctx
->opcode
, ctx
->pc
, rs
);
19551 default: /* Invalid */
19552 MIPS_INVAL("major opcode");
19553 generate_exception_end(ctx
, EXCP_RI
);
19558 void gen_intermediate_code(CPUMIPSState
*env
, struct TranslationBlock
*tb
)
19560 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19561 CPUState
*cs
= CPU(cpu
);
19563 target_ulong pc_start
;
19564 target_ulong next_page_start
;
19571 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
19574 ctx
.singlestep_enabled
= cs
->singlestep_enabled
;
19575 ctx
.insn_flags
= env
->insn_flags
;
19576 ctx
.CP0_Config1
= env
->CP0_Config1
;
19578 ctx
.bstate
= BS_NONE
;
19580 ctx
.kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
19581 ctx
.rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
19582 ctx
.ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
19583 ctx
.bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
19584 ctx
.bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
19585 ctx
.PAMask
= env
->PAMask
;
19586 ctx
.mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
19587 ctx
.CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
19588 /* Restore delay slot state from the tb context. */
19589 ctx
.hflags
= (uint32_t)tb
->flags
; /* FIXME: maybe use 64 bits here? */
19590 ctx
.ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
19591 ctx
.ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
19592 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
19593 restore_cpu_state(env
, &ctx
);
19594 #ifdef CONFIG_USER_ONLY
19595 ctx
.mem_idx
= MIPS_HFLAG_UM
;
19597 ctx
.mem_idx
= ctx
.hflags
& MIPS_HFLAG_KSU
;
19599 ctx
.default_tcg_memop_mask
= (ctx
.insn_flags
& ISA_MIPS32R6
) ?
19600 MO_UNALN
: MO_ALIGN
;
19602 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
19603 if (max_insns
== 0) {
19604 max_insns
= CF_COUNT_MASK
;
19606 if (max_insns
> TCG_MAX_INSNS
) {
19607 max_insns
= TCG_MAX_INSNS
;
19610 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb
, ctx
.mem_idx
, ctx
.hflags
);
19612 while (ctx
.bstate
== BS_NONE
) {
19613 tcg_gen_insn_start(ctx
.pc
, ctx
.hflags
& MIPS_HFLAG_BMASK
, ctx
.btarget
);
19616 if (unlikely(cpu_breakpoint_test(cs
, ctx
.pc
, BP_ANY
))) {
19617 save_cpu_state(&ctx
, 1);
19618 ctx
.bstate
= BS_BRANCH
;
19619 gen_helper_raise_exception_debug(cpu_env
);
19620 /* The address covered by the breakpoint must be included in
19621 [tb->pc, tb->pc + tb->size) in order to for it to be
19622 properly cleared -- thus we increment the PC here so that
19623 the logic setting tb->size below does the right thing. */
19625 goto done_generating
;
19628 if (num_insns
== max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
19632 is_slot
= ctx
.hflags
& MIPS_HFLAG_BMASK
;
19633 if (!(ctx
.hflags
& MIPS_HFLAG_M16
)) {
19634 ctx
.opcode
= cpu_ldl_code(env
, ctx
.pc
);
19636 decode_opc(env
, &ctx
);
19637 } else if (ctx
.insn_flags
& ASE_MICROMIPS
) {
19638 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19639 insn_bytes
= decode_micromips_opc(env
, &ctx
);
19640 } else if (ctx
.insn_flags
& ASE_MIPS16
) {
19641 ctx
.opcode
= cpu_lduw_code(env
, ctx
.pc
);
19642 insn_bytes
= decode_mips16_opc(env
, &ctx
);
19644 generate_exception_end(&ctx
, EXCP_RI
);
19648 if (ctx
.hflags
& MIPS_HFLAG_BMASK
) {
19649 if (!(ctx
.hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
19650 MIPS_HFLAG_FBNSLOT
))) {
19651 /* force to generate branch as there is neither delay nor
19655 if ((ctx
.hflags
& MIPS_HFLAG_M16
) &&
19656 (ctx
.hflags
& MIPS_HFLAG_FBNSLOT
)) {
19657 /* Force to generate branch as microMIPS R6 doesn't restrict
19658 branches in the forbidden slot. */
19663 gen_branch(&ctx
, insn_bytes
);
19665 ctx
.pc
+= insn_bytes
;
19667 /* Execute a branch and its delay slot as a single instruction.
19668 This is what GDB expects and is consistent with what the
19669 hardware does (e.g. if a delay slot instruction faults, the
19670 reported PC is the PC of the branch). */
19671 if (cs
->singlestep_enabled
&& (ctx
.hflags
& MIPS_HFLAG_BMASK
) == 0) {
19675 if (ctx
.pc
>= next_page_start
) {
19679 if (tcg_op_buf_full()) {
19683 if (num_insns
>= max_insns
)
19689 if (tb
->cflags
& CF_LAST_IO
) {
19692 if (cs
->singlestep_enabled
&& ctx
.bstate
!= BS_BRANCH
) {
19693 save_cpu_state(&ctx
, ctx
.bstate
!= BS_EXCP
);
19694 gen_helper_raise_exception_debug(cpu_env
);
19696 switch (ctx
.bstate
) {
19698 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19701 save_cpu_state(&ctx
, 0);
19702 gen_goto_tb(&ctx
, 0, ctx
.pc
);
19705 tcg_gen_exit_tb(0);
19713 gen_tb_end(tb
, num_insns
);
19715 tb
->size
= ctx
.pc
- pc_start
;
19716 tb
->icount
= num_insns
;
19720 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
19721 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
19722 log_target_disas(cs
, pc_start
, ctx
.pc
- pc_start
, 0);
19728 static void fpu_dump_state(CPUMIPSState
*env
, FILE *f
, fprintf_function fpu_fprintf
,
19732 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
19734 #define printfpr(fp) \
19737 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19738 " fd:%13g fs:%13g psu: %13g\n", \
19739 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
19740 (double)(fp)->fd, \
19741 (double)(fp)->fs[FP_ENDIAN_IDX], \
19742 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
19745 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
19746 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
19747 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
19748 " fd:%13g fs:%13g psu:%13g\n", \
19749 tmp.w[FP_ENDIAN_IDX], tmp.d, \
19751 (double)tmp.fs[FP_ENDIAN_IDX], \
19752 (double)tmp.fs[!FP_ENDIAN_IDX]); \
19757 fpu_fprintf(f
, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
19758 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
19759 get_float_exception_flags(&env
->active_fpu
.fp_status
));
19760 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
19761 fpu_fprintf(f
, "%3s: ", fregnames
[i
]);
19762 printfpr(&env
->active_fpu
.fpr
[i
]);
19768 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, fprintf_function cpu_fprintf
,
19771 MIPSCPU
*cpu
= MIPS_CPU(cs
);
19772 CPUMIPSState
*env
= &cpu
->env
;
19775 cpu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
19776 " LO=0x" TARGET_FMT_lx
" ds %04x "
19777 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
19778 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
19779 env
->hflags
, env
->btarget
, env
->bcond
);
19780 for (i
= 0; i
< 32; i
++) {
19782 cpu_fprintf(f
, "GPR%02d:", i
);
19783 cpu_fprintf(f
, " %s " TARGET_FMT_lx
, regnames
[i
], env
->active_tc
.gpr
[i
]);
19785 cpu_fprintf(f
, "\n");
19788 cpu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx
"\n",
19789 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
19790 cpu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
19792 env
->CP0_Config0
, env
->CP0_Config1
, env
->lladdr
);
19793 cpu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
19794 env
->CP0_Config2
, env
->CP0_Config3
);
19795 cpu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
19796 env
->CP0_Config4
, env
->CP0_Config5
);
19797 if (env
->hflags
& MIPS_HFLAG_FPU
)
19798 fpu_dump_state(env
, f
, cpu_fprintf
, flags
);
19801 void mips_tcg_init(void)
19806 /* Initialize various static tables. */
19810 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
19811 TCGV_UNUSED(cpu_gpr
[0]);
19812 for (i
= 1; i
< 32; i
++)
19813 cpu_gpr
[i
] = tcg_global_mem_new(TCG_AREG0
,
19814 offsetof(CPUMIPSState
, active_tc
.gpr
[i
]),
19817 for (i
= 0; i
< 32; i
++) {
19818 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
19820 tcg_global_mem_new_i64(TCG_AREG0
, off
, msaregnames
[i
* 2]);
19821 /* The scalar floating-point unit (FPU) registers are mapped on
19822 * the MSA vector registers. */
19823 fpu_f64
[i
] = msa_wr_d
[i
* 2];
19824 off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[1]);
19825 msa_wr_d
[i
* 2 + 1] =
19826 tcg_global_mem_new_i64(TCG_AREG0
, off
, msaregnames
[i
* 2 + 1]);
19829 cpu_PC
= tcg_global_mem_new(TCG_AREG0
,
19830 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
19831 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
19832 cpu_HI
[i
] = tcg_global_mem_new(TCG_AREG0
,
19833 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
19835 cpu_LO
[i
] = tcg_global_mem_new(TCG_AREG0
,
19836 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
19839 cpu_dspctrl
= tcg_global_mem_new(TCG_AREG0
,
19840 offsetof(CPUMIPSState
, active_tc
.DSPControl
),
19842 bcond
= tcg_global_mem_new(TCG_AREG0
,
19843 offsetof(CPUMIPSState
, bcond
), "bcond");
19844 btarget
= tcg_global_mem_new(TCG_AREG0
,
19845 offsetof(CPUMIPSState
, btarget
), "btarget");
19846 hflags
= tcg_global_mem_new_i32(TCG_AREG0
,
19847 offsetof(CPUMIPSState
, hflags
), "hflags");
19849 fpu_fcr0
= tcg_global_mem_new_i32(TCG_AREG0
,
19850 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
19852 fpu_fcr31
= tcg_global_mem_new_i32(TCG_AREG0
,
19853 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
19859 #include "translate_init.c"
19861 MIPSCPU
*cpu_mips_init(const char *cpu_model
)
19865 const mips_def_t
*def
;
19867 def
= cpu_mips_find_by_name(cpu_model
);
19870 cpu
= MIPS_CPU(object_new(TYPE_MIPS_CPU
));
19872 env
->cpu_model
= def
;
19874 #ifndef CONFIG_USER_ONLY
19875 mmu_init(env
, def
);
19877 fpu_init(env
, def
);
19878 mvp_init(env
, def
);
19880 object_property_set_bool(OBJECT(cpu
), true, "realized", NULL
);
19885 void cpu_state_reset(CPUMIPSState
*env
)
19887 MIPSCPU
*cpu
= mips_env_get_cpu(env
);
19888 CPUState
*cs
= CPU(cpu
);
19890 /* Reset registers to their default values */
19891 env
->CP0_PRid
= env
->cpu_model
->CP0_PRid
;
19892 env
->CP0_Config0
= env
->cpu_model
->CP0_Config0
;
19893 #ifdef TARGET_WORDS_BIGENDIAN
19894 env
->CP0_Config0
|= (1 << CP0C0_BE
);
19896 env
->CP0_Config1
= env
->cpu_model
->CP0_Config1
;
19897 env
->CP0_Config2
= env
->cpu_model
->CP0_Config2
;
19898 env
->CP0_Config3
= env
->cpu_model
->CP0_Config3
;
19899 env
->CP0_Config4
= env
->cpu_model
->CP0_Config4
;
19900 env
->CP0_Config4_rw_bitmask
= env
->cpu_model
->CP0_Config4_rw_bitmask
;
19901 env
->CP0_Config5
= env
->cpu_model
->CP0_Config5
;
19902 env
->CP0_Config5_rw_bitmask
= env
->cpu_model
->CP0_Config5_rw_bitmask
;
19903 env
->CP0_Config6
= env
->cpu_model
->CP0_Config6
;
19904 env
->CP0_Config7
= env
->cpu_model
->CP0_Config7
;
19905 env
->CP0_LLAddr_rw_bitmask
= env
->cpu_model
->CP0_LLAddr_rw_bitmask
19906 << env
->cpu_model
->CP0_LLAddr_shift
;
19907 env
->CP0_LLAddr_shift
= env
->cpu_model
->CP0_LLAddr_shift
;
19908 env
->SYNCI_Step
= env
->cpu_model
->SYNCI_Step
;
19909 env
->CCRes
= env
->cpu_model
->CCRes
;
19910 env
->CP0_Status_rw_bitmask
= env
->cpu_model
->CP0_Status_rw_bitmask
;
19911 env
->CP0_TCStatus_rw_bitmask
= env
->cpu_model
->CP0_TCStatus_rw_bitmask
;
19912 env
->CP0_SRSCtl
= env
->cpu_model
->CP0_SRSCtl
;
19913 env
->current_tc
= 0;
19914 env
->SEGBITS
= env
->cpu_model
->SEGBITS
;
19915 env
->SEGMask
= (target_ulong
)((1ULL << env
->cpu_model
->SEGBITS
) - 1);
19916 #if defined(TARGET_MIPS64)
19917 if (env
->cpu_model
->insn_flags
& ISA_MIPS3
) {
19918 env
->SEGMask
|= 3ULL << 62;
19921 env
->PABITS
= env
->cpu_model
->PABITS
;
19922 env
->CP0_SRSConf0_rw_bitmask
= env
->cpu_model
->CP0_SRSConf0_rw_bitmask
;
19923 env
->CP0_SRSConf0
= env
->cpu_model
->CP0_SRSConf0
;
19924 env
->CP0_SRSConf1_rw_bitmask
= env
->cpu_model
->CP0_SRSConf1_rw_bitmask
;
19925 env
->CP0_SRSConf1
= env
->cpu_model
->CP0_SRSConf1
;
19926 env
->CP0_SRSConf2_rw_bitmask
= env
->cpu_model
->CP0_SRSConf2_rw_bitmask
;
19927 env
->CP0_SRSConf2
= env
->cpu_model
->CP0_SRSConf2
;
19928 env
->CP0_SRSConf3_rw_bitmask
= env
->cpu_model
->CP0_SRSConf3_rw_bitmask
;
19929 env
->CP0_SRSConf3
= env
->cpu_model
->CP0_SRSConf3
;
19930 env
->CP0_SRSConf4_rw_bitmask
= env
->cpu_model
->CP0_SRSConf4_rw_bitmask
;
19931 env
->CP0_SRSConf4
= env
->cpu_model
->CP0_SRSConf4
;
19932 env
->CP0_PageGrain_rw_bitmask
= env
->cpu_model
->CP0_PageGrain_rw_bitmask
;
19933 env
->CP0_PageGrain
= env
->cpu_model
->CP0_PageGrain
;
19934 env
->active_fpu
.fcr0
= env
->cpu_model
->CP1_fcr0
;
19935 env
->msair
= env
->cpu_model
->MSAIR
;
19936 env
->insn_flags
= env
->cpu_model
->insn_flags
;
19938 #if defined(CONFIG_USER_ONLY)
19939 env
->CP0_Status
= (MIPS_HFLAG_UM
<< CP0St_KSU
);
19940 # ifdef TARGET_MIPS64
19941 /* Enable 64-bit register mode. */
19942 env
->CP0_Status
|= (1 << CP0St_PX
);
19944 # ifdef TARGET_ABI_MIPSN64
19945 /* Enable 64-bit address mode. */
19946 env
->CP0_Status
|= (1 << CP0St_UX
);
19948 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
19949 hardware registers. */
19950 env
->CP0_HWREna
|= 0x0000000F;
19951 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
19952 env
->CP0_Status
|= (1 << CP0St_CU1
);
19954 if (env
->CP0_Config3
& (1 << CP0C3_DSPP
)) {
19955 env
->CP0_Status
|= (1 << CP0St_MX
);
19957 # if defined(TARGET_MIPS64)
19958 /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
19959 if ((env
->CP0_Config1
& (1 << CP0C1_FP
)) &&
19960 (env
->CP0_Status_rw_bitmask
& (1 << CP0St_FR
))) {
19961 env
->CP0_Status
|= (1 << CP0St_FR
);
19965 if (env
->hflags
& MIPS_HFLAG_BMASK
) {
19966 /* If the exception was raised from a delay slot,
19967 come back to the jump. */
19968 env
->CP0_ErrorEPC
= (env
->active_tc
.PC
19969 - (env
->hflags
& MIPS_HFLAG_B16
? 2 : 4));
19971 env
->CP0_ErrorEPC
= env
->active_tc
.PC
;
19973 env
->active_tc
.PC
= (int32_t)0xBFC00000;
19974 env
->CP0_Random
= env
->tlb
->nb_tlb
- 1;
19975 env
->tlb
->tlb_in_use
= env
->tlb
->nb_tlb
;
19976 env
->CP0_Wired
= 0;
19977 env
->CP0_EBase
= (cs
->cpu_index
& 0x3FF);
19978 if (kvm_enabled()) {
19979 env
->CP0_EBase
|= 0x40000000;
19981 env
->CP0_EBase
|= 0x80000000;
19983 env
->CP0_Status
= (1 << CP0St_BEV
) | (1 << CP0St_ERL
);
19984 /* vectored interrupts not implemented, timer on int 7,
19985 no performance counters. */
19986 env
->CP0_IntCtl
= 0xe0000000;
19990 for (i
= 0; i
< 7; i
++) {
19991 env
->CP0_WatchLo
[i
] = 0;
19992 env
->CP0_WatchHi
[i
] = 0x80000000;
19994 env
->CP0_WatchLo
[7] = 0;
19995 env
->CP0_WatchHi
[7] = 0;
19997 /* Count register increments in debug mode, EJTAG version 1 */
19998 env
->CP0_Debug
= (1 << CP0DB_CNT
) | (0x1 << CP0DB_VER
);
20000 cpu_mips_store_count(env
, 1);
20002 if (env
->CP0_Config3
& (1 << CP0C3_MT
)) {
20005 /* Only TC0 on VPE 0 starts as active. */
20006 for (i
= 0; i
< ARRAY_SIZE(env
->tcs
); i
++) {
20007 env
->tcs
[i
].CP0_TCBind
= cs
->cpu_index
<< CP0TCBd_CurVPE
;
20008 env
->tcs
[i
].CP0_TCHalt
= 1;
20010 env
->active_tc
.CP0_TCHalt
= 1;
20013 if (cs
->cpu_index
== 0) {
20014 /* VPE0 starts up enabled. */
20015 env
->mvp
->CP0_MVPControl
|= (1 << CP0MVPCo_EVP
);
20016 env
->CP0_VPEConf0
|= (1 << CP0VPEC0_MVP
) | (1 << CP0VPEC0_VPA
);
20018 /* TC0 starts up unhalted. */
20020 env
->active_tc
.CP0_TCHalt
= 0;
20021 env
->tcs
[0].CP0_TCHalt
= 0;
20022 /* With thread 0 active. */
20023 env
->active_tc
.CP0_TCStatus
= (1 << CP0TCSt_A
);
20024 env
->tcs
[0].CP0_TCStatus
= (1 << CP0TCSt_A
);
20028 if ((env
->insn_flags
& ISA_MIPS32R6
) &&
20029 (env
->active_fpu
.fcr0
& (1 << FCR0_F64
))) {
20030 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
20031 env
->CP0_Status
|= (1 << CP0St_FR
);
20035 if (env
->CP0_Config3
& (1 << CP0C3_MSAP
)) {
20039 compute_hflags(env
);
20040 restore_rounding_mode(env
);
20041 restore_flush_mode(env
);
20042 restore_pamask(env
);
20043 cs
->exception_index
= EXCP_NONE
;
20045 if (semihosting_get_argc()) {
20046 /* UHI interface can be used to obtain argc and argv */
20047 env
->active_tc
.gpr
[4] = -1;
20051 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
20052 target_ulong
*data
)
20054 env
->active_tc
.PC
= data
[0];
20055 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
20056 env
->hflags
|= data
[1];
20057 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
20058 case MIPS_HFLAG_BR
:
20060 case MIPS_HFLAG_BC
:
20061 case MIPS_HFLAG_BL
:
20063 env
->btarget
= data
[2];