/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 * Copyright (c) 2020 Philippe Mathieu-Daudé
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
25 #include "qemu/osdep.h"
28 #include "tcg/tcg-op.h"
29 #include "exec/translator.h"
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 #include "semihosting/semihost.h"
35 #include "exec/translator.h"
37 #include "qemu/qemu-print.h"
38 #include "fpu_helper.h"
39 #include "translate.h"
/*
 * Many sysemu-only helpers are not reachable for user-only.
 * Define stub generators here, so that we need not either sprinkle
 * ifdefs through the translator, nor provide the helper function.
 */
#define STUB_HELPER(NAME, ...) \
    static inline void gen_helper_##NAME(__VA_ARGS__) \
    { g_assert_not_reached(); }
#ifdef CONFIG_USER_ONLY
/* The cache op helper has no user-mode effect; generate an unreachable stub. */
STUB_HELPER(cache, TCGv_env env, TCGv val, TCGv_i32 reg)
#endif
/* MIPS major opcodes (bits 31..26 of the instruction word) */
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
158 /* PC-relative address computation / loads */
159 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
160 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
162 /* Instructions determined by bits 19 and 20 */
163 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
164 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
165 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
167 /* Instructions determined by bits 16 ... 20 */
168 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
169 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
172 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
175 /* MIPS special opcodes */
176 #define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
180 OPC_SLL
= 0x00 | OPC_SPECIAL
,
181 /* NOP is SLL r0, r0, 0 */
182 /* SSNOP is SLL r0, r0, 1 */
183 /* EHB is SLL r0, r0, 3 */
184 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
185 OPC_ROTR
= OPC_SRL
| (1 << 21),
186 OPC_SRA
= 0x03 | OPC_SPECIAL
,
187 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
188 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
189 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
190 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
191 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
192 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
193 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
194 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
195 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
196 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
197 OPC_DROTR
= OPC_DSRL
| (1 << 21),
198 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
199 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
200 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
201 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
202 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
203 /* Multiplication / division */
204 OPC_MULT
= 0x18 | OPC_SPECIAL
,
205 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
206 OPC_DIV
= 0x1A | OPC_SPECIAL
,
207 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
208 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
209 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
210 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
211 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
213 /* 2 registers arithmetic / logic */
214 OPC_ADD
= 0x20 | OPC_SPECIAL
,
215 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
216 OPC_SUB
= 0x22 | OPC_SPECIAL
,
217 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
218 OPC_AND
= 0x24 | OPC_SPECIAL
,
219 OPC_OR
= 0x25 | OPC_SPECIAL
,
220 OPC_XOR
= 0x26 | OPC_SPECIAL
,
221 OPC_NOR
= 0x27 | OPC_SPECIAL
,
222 OPC_SLT
= 0x2A | OPC_SPECIAL
,
223 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
224 OPC_DADD
= 0x2C | OPC_SPECIAL
,
225 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
226 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
227 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
229 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
230 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
232 OPC_TGE
= 0x30 | OPC_SPECIAL
,
233 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
234 OPC_TLT
= 0x32 | OPC_SPECIAL
,
235 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
236 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
237 OPC_TNE
= 0x36 | OPC_SPECIAL
,
238 /* HI / LO registers load & stores */
239 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
240 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
241 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
242 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
243 /* Conditional moves */
244 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
245 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
247 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
248 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
250 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
253 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
254 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
255 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
256 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
257 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
259 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
260 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
261 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
262 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
266 * R6 Multiply and Divide instructions have the same opcode
267 * and function field as legacy OPC_MULT[U]/OPC_DIV[U]
269 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
272 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
273 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
274 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
275 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
276 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
277 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
278 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
279 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
281 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
282 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
283 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
284 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
285 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
286 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
287 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
288 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
290 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
291 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
292 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
293 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
294 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
297 /* REGIMM (rt field) opcodes */
298 #define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 16)))
301 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
302 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
303 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
304 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
305 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
306 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
307 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
308 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
309 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
310 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
311 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
312 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
313 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
314 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
315 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
316 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
318 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
319 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
322 /* Special2 opcodes */
323 #define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
326 /* Multiply & xxx operations */
327 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
328 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
329 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
330 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
331 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
333 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
334 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
335 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
336 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
337 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
338 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
339 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
340 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
341 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
342 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
343 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
344 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
346 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
347 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
348 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
349 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
351 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
354 /* Special3 opcodes */
355 #define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
358 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
359 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
360 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
361 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
362 OPC_INS
= 0x04 | OPC_SPECIAL3
,
363 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
364 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
365 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
366 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
367 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
368 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
369 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
370 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
371 OPC_GINV
= 0x3D | OPC_SPECIAL3
,
374 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
375 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
376 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
377 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
378 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
379 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
380 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
381 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
382 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
383 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
384 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
385 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
388 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
389 /* MIPS DSP Arithmetic */
390 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
391 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
392 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
393 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
394 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
395 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
396 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
397 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
398 /* MIPS DSP GPR-Based Shift Sub-class */
399 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
400 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
401 /* MIPS DSP Multiply Sub-class insns */
402 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
403 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
404 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
405 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
406 /* DSP Bit/Manipulation Sub-class */
407 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
408 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
409 /* MIPS DSP Append Sub-class */
410 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
411 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
412 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
413 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
414 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
417 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
418 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
419 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
420 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
421 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
422 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
423 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
424 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
425 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
426 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
427 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
428 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
429 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
430 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
431 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
432 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
435 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
436 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
437 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
438 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
439 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
440 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
443 /* Loongson EXT load/store quad word opcodes */
444 #define MASK_LOONGSON_GSLSQ(op) (MASK_OP_MAJOR(op) | (op & 0x8020))
446 OPC_GSLQ
= 0x0020 | OPC_LWC2
,
447 OPC_GSLQC1
= 0x8020 | OPC_LWC2
,
448 OPC_GSSHFL
= OPC_LWC2
,
449 OPC_GSSQ
= 0x0020 | OPC_SWC2
,
450 OPC_GSSQC1
= 0x8020 | OPC_SWC2
,
451 OPC_GSSHFS
= OPC_SWC2
,
454 /* Loongson EXT shifted load/store opcodes */
455 #define MASK_LOONGSON_GSSHFLS(op) (MASK_OP_MAJOR(op) | (op & 0xc03f))
457 OPC_GSLWLC1
= 0x4 | OPC_GSSHFL
,
458 OPC_GSLWRC1
= 0x5 | OPC_GSSHFL
,
459 OPC_GSLDLC1
= 0x6 | OPC_GSSHFL
,
460 OPC_GSLDRC1
= 0x7 | OPC_GSSHFL
,
461 OPC_GSSWLC1
= 0x4 | OPC_GSSHFS
,
462 OPC_GSSWRC1
= 0x5 | OPC_GSSHFS
,
463 OPC_GSSDLC1
= 0x6 | OPC_GSSHFS
,
464 OPC_GSSDRC1
= 0x7 | OPC_GSSHFS
,
467 /* Loongson EXT LDC2/SDC2 opcodes */
468 #define MASK_LOONGSON_LSDC2(op) (MASK_OP_MAJOR(op) | (op & 0x7))
471 OPC_GSLBX
= 0x0 | OPC_LDC2
,
472 OPC_GSLHX
= 0x1 | OPC_LDC2
,
473 OPC_GSLWX
= 0x2 | OPC_LDC2
,
474 OPC_GSLDX
= 0x3 | OPC_LDC2
,
475 OPC_GSLWXC1
= 0x6 | OPC_LDC2
,
476 OPC_GSLDXC1
= 0x7 | OPC_LDC2
,
477 OPC_GSSBX
= 0x0 | OPC_SDC2
,
478 OPC_GSSHX
= 0x1 | OPC_SDC2
,
479 OPC_GSSWX
= 0x2 | OPC_SDC2
,
480 OPC_GSSDX
= 0x3 | OPC_SDC2
,
481 OPC_GSSWXC1
= 0x6 | OPC_SDC2
,
482 OPC_GSSDXC1
= 0x7 | OPC_SDC2
,
486 #define MASK_BSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
489 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
490 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
491 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
492 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
493 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
494 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
495 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
496 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
500 #define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
503 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
504 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
505 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
506 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
507 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
508 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
509 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
510 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
511 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
512 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
513 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
516 /* MIPS DSP REGIMM opcodes */
518 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
519 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
522 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
525 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
526 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
527 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
528 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
531 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
533 /* MIPS DSP Arithmetic Sub-class */
534 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
535 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
536 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
537 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
538 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
539 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
540 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
541 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
542 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
543 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
544 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
545 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
546 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
547 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
548 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
549 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
550 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
551 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
552 /* MIPS DSP Multiply Sub-class insns */
553 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
554 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
555 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
556 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
557 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
558 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
561 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
562 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
564 /* MIPS DSP Arithmetic Sub-class */
565 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
566 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
567 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
568 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
569 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
570 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
571 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
572 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
573 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
574 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
575 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
576 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
577 /* MIPS DSP Multiply Sub-class insns */
578 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
579 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
580 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
581 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
584 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
586 /* MIPS DSP Arithmetic Sub-class */
587 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
588 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
589 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
590 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
591 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
592 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
593 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
594 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
595 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
596 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
597 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
598 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
599 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
600 /* DSP Bit/Manipulation Sub-class */
601 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
602 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
603 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
604 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
605 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
608 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
610 /* MIPS DSP Arithmetic Sub-class */
611 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
612 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
613 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
614 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
615 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
616 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
617 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
618 /* DSP Compare-Pick Sub-class */
619 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
620 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
621 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
622 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
623 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
624 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
625 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
626 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
627 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
628 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
629 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
630 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
631 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
632 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
633 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
636 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
638 /* MIPS DSP GPR-Based Shift Sub-class */
639 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
640 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
641 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
642 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
643 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
644 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
645 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
646 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
647 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
648 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
649 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
650 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
651 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
652 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
653 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
654 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
655 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
656 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
657 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
658 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
659 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
660 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
663 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
665 /* MIPS DSP Multiply Sub-class insns */
666 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
667 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
668 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
669 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
670 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
671 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
672 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
673 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
674 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
675 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
676 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
677 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
678 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
679 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
680 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
681 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
682 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
683 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
684 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
685 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
686 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
687 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
690 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
692 /* DSP Bit/Manipulation Sub-class */
693 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
696 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
698 /* MIPS DSP Append Sub-class */
699 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
700 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
701 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
704 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
706 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
707 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
708 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
709 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
710 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
711 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
712 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
713 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
714 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
715 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
716 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
717 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
718 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
719 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
720 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
721 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
722 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
723 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
726 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
728 /* MIPS DSP Arithmetic Sub-class */
729 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
730 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
731 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
732 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
733 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
734 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
735 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
736 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
737 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
738 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
739 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
740 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
741 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
742 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
743 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
744 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
745 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
746 /* DSP Bit/Manipulation Sub-class */
747 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
748 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
749 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
750 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
751 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
752 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
755 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
757 /* MIPS DSP Multiply Sub-class insns */
758 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
759 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
760 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
761 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
762 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
763 /* MIPS DSP Arithmetic Sub-class */
764 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
765 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
766 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
767 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
768 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
769 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
770 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
771 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
772 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
773 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
774 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
775 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
776 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
777 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
778 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
779 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
780 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
781 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
782 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
783 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
784 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
787 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
789 /* DSP Compare-Pick Sub-class */
790 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
792 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
793 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
794 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
795 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
796 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
797 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
798 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
799 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
800 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
801 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
802 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
803 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
804 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
805 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
806 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
807 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
808 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
809 /* MIPS DSP Arithmetic Sub-class */
810 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
811 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
812 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
813 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
814 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
815 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
816 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
817 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
820 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
822 /* DSP Append Sub-class */
823 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
824 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
825 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
826 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
829 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
831 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
832 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
833 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
834 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
835 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
836 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
837 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
838 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
839 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
840 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
841 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
842 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
843 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
844 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
845 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
846 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
847 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
848 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
849 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
850 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
851 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
852 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
855 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
857 /* DSP Bit/Manipulation Sub-class */
858 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
861 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
863 /* MIPS DSP Multiply Sub-class insns */
864 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
865 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
866 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
867 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
868 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
869 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
870 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
871 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
872 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
873 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
874 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
875 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
876 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
877 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
878 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
879 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
880 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
881 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
882 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
883 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
884 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
885 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
886 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
887 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
888 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
889 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
892 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
894 /* MIPS DSP GPR-Based Shift Sub-class */
895 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
896 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
897 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
898 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
899 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
900 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
901 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
902 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
903 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
904 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
905 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
906 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
907 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
908 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
909 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
910 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
911 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
912 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
913 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
914 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
915 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
916 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
917 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
918 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
919 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
920 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
923 /* Coprocessor 0 (rs field) */
924 #define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
927 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
928 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
929 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
930 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
931 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
932 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
933 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
934 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
935 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
936 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
937 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
938 OPC_C0
= (0x10 << 21) | OPC_CP0
,
939 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
940 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
941 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
942 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
943 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
944 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
945 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
946 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
947 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
948 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
949 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
950 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
951 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
952 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
953 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
957 #define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
960 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
961 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
962 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
963 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
964 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
965 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
966 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
967 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
970 /* Coprocessor 0 (with rs == C0) */
971 #define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
974 OPC_TLBR
= 0x01 | OPC_C0
,
975 OPC_TLBWI
= 0x02 | OPC_C0
,
976 OPC_TLBINV
= 0x03 | OPC_C0
,
977 OPC_TLBINVF
= 0x04 | OPC_C0
,
978 OPC_TLBWR
= 0x06 | OPC_C0
,
979 OPC_TLBP
= 0x08 | OPC_C0
,
980 OPC_RFE
= 0x10 | OPC_C0
,
981 OPC_ERET
= 0x18 | OPC_C0
,
982 OPC_DERET
= 0x1F | OPC_C0
,
983 OPC_WAIT
= 0x20 | OPC_C0
,
986 #define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
989 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
990 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
991 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
992 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
993 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
994 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
995 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
996 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
997 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
998 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
999 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1002 #define MASK_LMMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1005 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1010 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1011 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1012 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1014 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1019 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1020 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1021 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1023 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1024 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1025 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1026 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1027 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1028 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1029 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1030 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1032 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1037 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1038 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1039 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1041 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1042 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1044 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1045 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1046 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1048 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1049 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1051 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1052 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1053 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1055 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1056 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1058 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1059 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1060 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1062 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1063 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1065 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1066 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1067 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1069 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1070 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1072 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1073 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1074 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1076 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1077 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1079 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1080 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1081 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1083 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1084 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1086 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1087 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1088 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1090 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1091 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1092 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1093 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1094 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1095 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1099 #define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1102 OPC_LWXC1
= 0x00 | OPC_CP3
,
1103 OPC_LDXC1
= 0x01 | OPC_CP3
,
1104 OPC_LUXC1
= 0x05 | OPC_CP3
,
1105 OPC_SWXC1
= 0x08 | OPC_CP3
,
1106 OPC_SDXC1
= 0x09 | OPC_CP3
,
1107 OPC_SUXC1
= 0x0D | OPC_CP3
,
1108 OPC_PREFX
= 0x0F | OPC_CP3
,
1109 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1110 OPC_MADD_S
= 0x20 | OPC_CP3
,
1111 OPC_MADD_D
= 0x21 | OPC_CP3
,
1112 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1113 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1114 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1115 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1116 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1117 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1118 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1119 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1120 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1121 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1125 * MMI (MultiMedia Instruction) encodings
1126 * ======================================
1128 * MMI instructions encoding table keys:
1130 * * This code is reserved for future use. An attempt to execute it
1131 * causes a Reserved Instruction exception.
1132 * % This code indicates an instruction class. The instruction word
1133 * must be further decoded by examining additional tables that show
1134 * the values for other instruction fields.
1135 * # This code is reserved for the unsupported instructions DMULT,
1136 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
1137 * to execute it causes a Reserved Instruction exception.
1139 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
1142 * +--------+----------------------------------------+
1144 * +--------+----------------------------------------+
1146 * opcode bits 28..26
1147 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
1148 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
1149 * -------+-------+-------+-------+-------+-------+-------+-------+-------
1150 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
1151 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
1152 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
1153 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
1154 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
1155 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
1156 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
1157 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
1161 MMI_OPC_CLASS_MMI
= 0x1C << 26, /* Same as OPC_SPECIAL2 */
1162 MMI_OPC_SQ
= 0x1F << 26, /* Same as OPC_SPECIAL3 */
1166 * MMI instructions with opcode field = MMI:
1169 * +--------+-------------------------------+--------+
1170 * | MMI | |function|
1171 * +--------+-------------------------------+--------+
1173 * function bits 2..0
1174 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
1175 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
1176 * -------+-------+-------+-------+-------+-------+-------+-------+-------
1177 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
1178 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
1179 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
1180 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
1181 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
1182 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
1183 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
1184 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
1187 #define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
1189 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
1190 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
1191 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
1192 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
1193 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
1194 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
1195 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
1196 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
1199 /* global register indices */
1200 TCGv cpu_gpr
[32], cpu_PC
;
1202 * For CPUs using 128-bit GPR registers, we put the lower halves in cpu_gpr[])
1203 * and the upper halves in cpu_gpr_hi[].
1205 TCGv_i64 cpu_gpr_hi
[32];
1206 TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1207 static TCGv cpu_dspctrl
, btarget
;
1209 static TCGv cpu_lladdr
, cpu_llval
;
1210 static TCGv_i32 hflags
;
1211 TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1212 TCGv_i64 fpu_f64
[32];
1214 #include "exec/gen-icount.h"
1216 #define DISAS_STOP DISAS_TARGET_0
1217 #define DISAS_EXIT DISAS_TARGET_1
1219 static const char regnames_HI
[][4] = {
1220 "HI0", "HI1", "HI2", "HI3",
1223 static const char regnames_LO
[][4] = {
1224 "LO0", "LO1", "LO2", "LO3",
1227 /* General purpose registers moves. */
1228 void gen_load_gpr(TCGv t
, int reg
)
1231 tcg_gen_movi_tl(t
, 0);
1233 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1237 void gen_store_gpr(TCGv t
, int reg
)
1240 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
#if defined(TARGET_MIPS64)
/*
 * Copy the upper 64 bits of 128-bit GPR @reg into @t.
 * As with the low halves, register 0 always reads as zero.
 */
void gen_load_gpr_hi(TCGv_i64 t, int reg)
{
    if (reg == 0) {
        tcg_gen_movi_i64(t, 0);
    } else {
        tcg_gen_mov_i64(t, cpu_gpr_hi[reg]);
    }
}

/*
 * Copy @t into the upper 64 bits of 128-bit GPR @reg;
 * stores to register 0 are discarded.
 */
void gen_store_gpr_hi(TCGv_i64 t, int reg)
{
    if (reg != 0) {
        tcg_gen_mov_i64(cpu_gpr_hi[reg], t);
    }
}
#endif /* TARGET_MIPS64 */
1262 /* Moves to/from shadow registers. */
1263 static inline void gen_load_srsgpr(int from
, int to
)
1265 TCGv t0
= tcg_temp_new();
1268 tcg_gen_movi_tl(t0
, 0);
1270 TCGv_i32 t2
= tcg_temp_new_i32();
1271 TCGv_ptr addr
= tcg_temp_new_ptr();
1273 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1274 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1275 tcg_gen_andi_i32(t2
, t2
, 0xf);
1276 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1277 tcg_gen_ext_i32_ptr(addr
, t2
);
1278 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1280 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1281 tcg_temp_free_ptr(addr
);
1282 tcg_temp_free_i32(t2
);
1284 gen_store_gpr(t0
, to
);
1288 static inline void gen_store_srsgpr(int from
, int to
)
1291 TCGv t0
= tcg_temp_new();
1292 TCGv_i32 t2
= tcg_temp_new_i32();
1293 TCGv_ptr addr
= tcg_temp_new_ptr();
1295 gen_load_gpr(t0
, from
);
1296 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1297 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1298 tcg_gen_andi_i32(t2
, t2
, 0xf);
1299 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1300 tcg_gen_ext_i32_ptr(addr
, t2
);
1301 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1303 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1304 tcg_temp_free_ptr(addr
);
1305 tcg_temp_free_i32(t2
);
1311 static inline void gen_save_pc(target_ulong pc
)
1313 tcg_gen_movi_tl(cpu_PC
, pc
);
1316 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1318 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1319 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
1320 gen_save_pc(ctx
->base
.pc_next
);
1321 ctx
->saved_pc
= ctx
->base
.pc_next
;
1323 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1324 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1325 ctx
->saved_hflags
= ctx
->hflags
;
1326 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1332 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1338 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1340 ctx
->saved_hflags
= ctx
->hflags
;
1341 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1347 ctx
->btarget
= env
->btarget
;
1352 void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1354 save_cpu_state(ctx
, 1);
1355 gen_helper_raise_exception_err(cpu_env
, tcg_constant_i32(excp
),
1356 tcg_constant_i32(err
));
1357 ctx
->base
.is_jmp
= DISAS_NORETURN
;
1360 void generate_exception(DisasContext
*ctx
, int excp
)
1362 gen_helper_raise_exception(cpu_env
, tcg_constant_i32(excp
));
1365 void generate_exception_end(DisasContext
*ctx
, int excp
)
1367 generate_exception_err(ctx
, excp
, 0);
1370 void generate_exception_break(DisasContext
*ctx
, int code
)
1372 #ifdef CONFIG_USER_ONLY
1373 /* Pass the break code along to cpu_loop. */
1374 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
1375 offsetof(CPUMIPSState
, error_code
));
1377 generate_exception_end(ctx
, EXCP_BREAK
);
1380 void gen_reserved_instruction(DisasContext
*ctx
)
1382 generate_exception_end(ctx
, EXCP_RI
);
1385 /* Floating point register moves. */
1386 void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1388 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1389 generate_exception(ctx
, EXCP_RI
);
1391 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1394 void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1397 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1398 generate_exception(ctx
, EXCP_RI
);
1400 t64
= tcg_temp_new_i64();
1401 tcg_gen_extu_i32_i64(t64
, t
);
1402 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1403 tcg_temp_free_i64(t64
);
1406 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1408 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1409 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1411 gen_load_fpr32(ctx
, t
, reg
| 1);
1415 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1417 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1418 TCGv_i64 t64
= tcg_temp_new_i64();
1419 tcg_gen_extu_i32_i64(t64
, t
);
1420 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1421 tcg_temp_free_i64(t64
);
1423 gen_store_fpr32(ctx
, t
, reg
| 1);
1427 void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1429 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1430 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1432 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1436 void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1438 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1439 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1442 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1443 t0
= tcg_temp_new_i64();
1444 tcg_gen_shri_i64(t0
, t
, 32);
1445 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1446 tcg_temp_free_i64(t0
);
/*
 * Return the FCSR bit index holding FP condition code @cc.
 * Condition code 0 lives at bit 23; codes 1..7 are contiguous
 * starting at bit 25 (bit 24 is FCSR.FS), i.e. 24 + cc.
 */
int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
1459 /* Addresses computation */
1460 void gen_op_addr_add(DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1462 tcg_gen_add_tl(ret
, arg0
, arg1
);
1464 #if defined(TARGET_MIPS64)
1465 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1466 tcg_gen_ext32s_i64(ret
, ret
);
1471 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
1474 tcg_gen_addi_tl(ret
, base
, ofs
);
1476 #if defined(TARGET_MIPS64)
1477 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1478 tcg_gen_ext32s_i64(ret
, ret
);
1483 /* Addresses computation (translation time) */
1484 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1487 target_long sum
= base
+ offset
;
1489 #if defined(TARGET_MIPS64)
1490 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1497 /* Sign-extract the low 32-bits to a target_long. */
1498 void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1500 #if defined(TARGET_MIPS64)
1501 tcg_gen_ext32s_i64(ret
, arg
);
1503 tcg_gen_extrl_i64_i32(ret
, arg
);
1507 /* Sign-extract the high 32-bits to a target_long. */
1508 void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1510 #if defined(TARGET_MIPS64)
1511 tcg_gen_sari_i64(ret
, arg
, 32);
1513 tcg_gen_extrh_i64_i32(ret
, arg
);
1517 bool check_cp0_enabled(DisasContext
*ctx
)
1519 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
1520 generate_exception_end(ctx
, EXCP_CpU
);
1526 void check_cp1_enabled(DisasContext
*ctx
)
1528 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
))) {
1529 generate_exception_err(ctx
, EXCP_CpU
, 1);
1534 * Verify that the processor is running with COP1X instructions enabled.
1535 * This is associated with the nabla symbol in the MIPS32 and MIPS64
1538 void check_cop1x(DisasContext
*ctx
)
1540 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
))) {
1541 gen_reserved_instruction(ctx
);
1546 * Verify that the processor is running with 64-bit floating-point
1547 * operations enabled.
1549 void check_cp1_64bitmode(DisasContext
*ctx
)
1551 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
))) {
1552 gen_reserved_instruction(ctx
);
1557 * Verify if floating point register is valid; an operation is not defined
1558 * if bit 0 of any register specification is set and the FR bit in the
1559 * Status register equals zero, since the register numbers specify an
1560 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1561 * in the Status register equals one, both even and odd register numbers
1562 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1564 * Multiple 64 bit wide registers can be checked by calling
1565 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1567 void check_cp1_registers(DisasContext
*ctx
, int regs
)
1569 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1))) {
1570 gen_reserved_instruction(ctx
);
1575 * Verify that the processor is running with DSP instructions enabled.
1576 * This is enabled by CP0 Status register MX(24) bit.
1578 static inline void check_dsp(DisasContext
*ctx
)
1580 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1581 if (ctx
->insn_flags
& ASE_DSP
) {
1582 generate_exception_end(ctx
, EXCP_DSPDIS
);
1584 gen_reserved_instruction(ctx
);
1589 static inline void check_dsp_r2(DisasContext
*ctx
)
1591 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
1592 if (ctx
->insn_flags
& ASE_DSP
) {
1593 generate_exception_end(ctx
, EXCP_DSPDIS
);
1595 gen_reserved_instruction(ctx
);
1600 static inline void check_dsp_r3(DisasContext
*ctx
)
1602 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
1603 if (ctx
->insn_flags
& ASE_DSP
) {
1604 generate_exception_end(ctx
, EXCP_DSPDIS
);
1606 gen_reserved_instruction(ctx
);
1612 * This code generates a "reserved instruction" exception if the
1613 * CPU does not support the instruction set corresponding to flags.
1615 void check_insn(DisasContext
*ctx
, uint64_t flags
)
1617 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1618 gen_reserved_instruction(ctx
);
1623 * This code generates a "reserved instruction" exception if the
1624 * CPU has corresponding flag set which indicates that the instruction
1627 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
1629 if (unlikely(ctx
->insn_flags
& flags
)) {
1630 gen_reserved_instruction(ctx
);
1635 * The Linux kernel traps certain reserved instruction exceptions to
1636 * emulate the corresponding instructions. QEMU is the kernel in user
1637 * mode, so those traps are emulated by accepting the instructions.
1639 * A reserved instruction exception is generated for flagged CPUs if
1640 * QEMU runs in system mode.
1642 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
1644 #ifndef CONFIG_USER_ONLY
1645 check_insn_opc_removed(ctx
, flags
);
1650 * This code generates a "reserved instruction" exception if the
1651 * CPU does not support 64-bit paired-single (PS) floating point data type.
1653 static inline void check_ps(DisasContext
*ctx
)
1655 if (unlikely(!ctx
->ps
)) {
1656 generate_exception(ctx
, EXCP_RI
);
1658 check_cp1_64bitmode(ctx
);
1662 * This code generates a "reserved instruction" exception if cpu is not
1663 * 64-bit or 64-bit instructions are not enabled.
1665 void check_mips_64(DisasContext
*ctx
)
1667 if (unlikely((TARGET_LONG_BITS
!= 64) || !(ctx
->hflags
& MIPS_HFLAG_64
))) {
1668 gen_reserved_instruction(ctx
);
1672 #ifndef CONFIG_USER_ONLY
1673 static inline void check_mvh(DisasContext
*ctx
)
1675 if (unlikely(!ctx
->mvh
)) {
1676 generate_exception(ctx
, EXCP_RI
);
1682 * This code generates a "reserved instruction" exception if the
1683 * Config5 XNP bit is set.
1685 static inline void check_xnp(DisasContext
*ctx
)
1687 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
1688 gen_reserved_instruction(ctx
);
1692 #ifndef CONFIG_USER_ONLY
1694 * This code generates a "reserved instruction" exception if the
1695 * Config3 PW bit is NOT set.
1697 static inline void check_pw(DisasContext
*ctx
)
1699 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
1700 gen_reserved_instruction(ctx
);
1706 * This code generates a "reserved instruction" exception if the
1707 * Config3 MT bit is NOT set.
1709 static inline void check_mt(DisasContext
*ctx
)
1711 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
1712 gen_reserved_instruction(ctx
);
1716 #ifndef CONFIG_USER_ONLY
1718 * This code generates a "coprocessor unusable" exception if CP0 is not
1719 * available, and, if that is not the case, generates a "reserved instruction"
1720 * exception if the Config5 MT bit is NOT set. This is needed for availability
1721 * control of some of MT ASE instructions.
1723 static inline void check_cp0_mt(DisasContext
*ctx
)
1725 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
1726 generate_exception_end(ctx
, EXCP_CpU
);
1728 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
1729 gen_reserved_instruction(ctx
);
1736 * This code generates a "reserved instruction" exception if the
1737 * Config5 NMS bit is set.
1739 static inline void check_nms(DisasContext
*ctx
)
1741 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
1742 gen_reserved_instruction(ctx
);
1747 * This code generates a "reserved instruction" exception if the
1748 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
1749 * Config2 TL, and Config5 L2C are unset.
1751 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
1753 if (unlikely((ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
1754 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
1755 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
1756 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
1757 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
1758 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))) {
1759 gen_reserved_instruction(ctx
);
1764 * This code generates a "reserved instruction" exception if the
1765 * Config5 EVA bit is NOT set.
1767 static inline void check_eva(DisasContext
*ctx
)
1769 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
1770 gen_reserved_instruction(ctx
);
1776 * Define small wrappers for gen_load_fpr* so that we have a uniform
1777 * calling interface for 32 and 64-bit FPRs. No sense in changing
1778 * all callers for gen_load_fpr32 when we need the CTX parameter for
1781 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1782 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1783 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1784 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1785 int ft, int fs, int cc) \
1787 TCGv_i##bits fp0 = tcg_temp_new_i##bits(); \
1788 TCGv_i##bits fp1 = tcg_temp_new_i##bits(); \
1797 check_cp1_registers(ctx, fs | ft); \
1805 gen_ldcmp_fpr##bits(ctx, fp0, fs); \
1806 gen_ldcmp_fpr##bits(ctx, fp1, ft); \
1809 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); \
1812 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); \
1815 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); \
1818 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); \
1821 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); \
1824 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); \
1827 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); \
1830 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); \
1833 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); \
1836 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); \
1839 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); \
1842 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); \
1845 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); \
1848 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); \
1851 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); \
1854 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); \
1859 tcg_temp_free_i##bits(fp0); \
1860 tcg_temp_free_i##bits(fp1); \
1863 FOP_CONDS(, 0, d
, FMT_D
, 64)
1864 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1865 FOP_CONDS(, 0, s
, FMT_S
, 32)
1866 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1867 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1868 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1871 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1872 static inline void gen_r6_cmp_ ## fmt(DisasContext *ctx, int n, \
1873 int ft, int fs, int fd) \
1875 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1876 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1877 if (ifmt == FMT_D) { \
1878 check_cp1_registers(ctx, fs | ft | fd); \
1880 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1881 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1884 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1887 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1890 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1893 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1896 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1899 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1902 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1905 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1908 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1911 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1914 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1917 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1920 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1923 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1926 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1929 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1932 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1935 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1938 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
1941 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
1944 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
1947 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
1953 tcg_temp_free_i ## bits(fp0); \
1954 tcg_temp_free_i ## bits(fp1); \
1957 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
1958 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
1960 #undef gen_ldcmp_fpr32
1961 #undef gen_ldcmp_fpr64
1963 /* load/store instructions. */
1964 #ifdef CONFIG_USER_ONLY
1965 #define OP_LD_ATOMIC(insn, fname) \
1966 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
1967 DisasContext *ctx) \
1969 TCGv t0 = tcg_temp_new(); \
1970 tcg_gen_mov_tl(t0, arg1); \
1971 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
1972 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
1973 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
1974 tcg_temp_free(t0); \
1977 #define OP_LD_ATOMIC(insn, fname) \
1978 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
1979 DisasContext *ctx) \
1981 gen_helper_##insn(ret, cpu_env, arg1, tcg_constant_i32(mem_idx)); \
1984 OP_LD_ATOMIC(ll
, ld32s
);
1985 #if defined(TARGET_MIPS64)
1986 OP_LD_ATOMIC(lld
, ld64
);
1990 void gen_base_offset_addr(DisasContext
*ctx
, TCGv addr
, int base
, int offset
)
1993 tcg_gen_movi_tl(addr
, offset
);
1994 } else if (offset
== 0) {
1995 gen_load_gpr(addr
, base
);
1997 tcg_gen_movi_tl(addr
, offset
);
1998 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2002 static target_ulong
pc_relative_pc(DisasContext
*ctx
)
2004 target_ulong pc
= ctx
->base
.pc_next
;
2006 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2007 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2012 pc
&= ~(target_ulong
)3;
2017 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2018 int rt
, int base
, int offset
)
2021 int mem_idx
= ctx
->mem_idx
;
2023 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
|
2026 * Loongson CPU uses a load to zero register for prefetch.
2027 * We emulate it as a NOP. On other CPU we must perform the
2028 * actual memory access.
2033 t0
= tcg_temp_new();
2034 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2037 #if defined(TARGET_MIPS64)
2039 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2040 ctx
->default_tcg_memop_mask
);
2041 gen_store_gpr(t0
, rt
);
2044 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
|
2045 ctx
->default_tcg_memop_mask
);
2046 gen_store_gpr(t0
, rt
);
2050 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2051 gen_store_gpr(t0
, rt
);
2054 t1
= tcg_temp_new();
2056 * Do a byte access to possibly trigger a page
2057 * fault with the unaligned address.
2059 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2060 tcg_gen_andi_tl(t1
, t0
, 7);
2061 if (!cpu_is_bigendian(ctx
)) {
2062 tcg_gen_xori_tl(t1
, t1
, 7);
2064 tcg_gen_shli_tl(t1
, t1
, 3);
2065 tcg_gen_andi_tl(t0
, t0
, ~7);
2066 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2067 tcg_gen_shl_tl(t0
, t0
, t1
);
2068 t2
= tcg_const_tl(-1);
2069 tcg_gen_shl_tl(t2
, t2
, t1
);
2070 gen_load_gpr(t1
, rt
);
2071 tcg_gen_andc_tl(t1
, t1
, t2
);
2073 tcg_gen_or_tl(t0
, t0
, t1
);
2075 gen_store_gpr(t0
, rt
);
2078 t1
= tcg_temp_new();
2080 * Do a byte access to possibly trigger a page
2081 * fault with the unaligned address.
2083 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2084 tcg_gen_andi_tl(t1
, t0
, 7);
2085 if (cpu_is_bigendian(ctx
)) {
2086 tcg_gen_xori_tl(t1
, t1
, 7);
2088 tcg_gen_shli_tl(t1
, t1
, 3);
2089 tcg_gen_andi_tl(t0
, t0
, ~7);
2090 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2091 tcg_gen_shr_tl(t0
, t0
, t1
);
2092 tcg_gen_xori_tl(t1
, t1
, 63);
2093 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2094 tcg_gen_shl_tl(t2
, t2
, t1
);
2095 gen_load_gpr(t1
, rt
);
2096 tcg_gen_and_tl(t1
, t1
, t2
);
2098 tcg_gen_or_tl(t0
, t0
, t1
);
2100 gen_store_gpr(t0
, rt
);
2103 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2104 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2106 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2107 gen_store_gpr(t0
, rt
);
2111 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2112 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2114 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2115 gen_store_gpr(t0
, rt
);
2118 mem_idx
= MIPS_HFLAG_UM
;
2121 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2122 ctx
->default_tcg_memop_mask
);
2123 gen_store_gpr(t0
, rt
);
2126 mem_idx
= MIPS_HFLAG_UM
;
2129 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2130 ctx
->default_tcg_memop_mask
);
2131 gen_store_gpr(t0
, rt
);
2134 mem_idx
= MIPS_HFLAG_UM
;
2137 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2138 ctx
->default_tcg_memop_mask
);
2139 gen_store_gpr(t0
, rt
);
2142 mem_idx
= MIPS_HFLAG_UM
;
2145 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2146 gen_store_gpr(t0
, rt
);
2149 mem_idx
= MIPS_HFLAG_UM
;
2152 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2153 gen_store_gpr(t0
, rt
);
2156 mem_idx
= MIPS_HFLAG_UM
;
2159 t1
= tcg_temp_new();
2161 * Do a byte access to possibly trigger a page
2162 * fault with the unaligned address.
2164 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2165 tcg_gen_andi_tl(t1
, t0
, 3);
2166 if (!cpu_is_bigendian(ctx
)) {
2167 tcg_gen_xori_tl(t1
, t1
, 3);
2169 tcg_gen_shli_tl(t1
, t1
, 3);
2170 tcg_gen_andi_tl(t0
, t0
, ~3);
2171 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2172 tcg_gen_shl_tl(t0
, t0
, t1
);
2173 t2
= tcg_const_tl(-1);
2174 tcg_gen_shl_tl(t2
, t2
, t1
);
2175 gen_load_gpr(t1
, rt
);
2176 tcg_gen_andc_tl(t1
, t1
, t2
);
2178 tcg_gen_or_tl(t0
, t0
, t1
);
2180 tcg_gen_ext32s_tl(t0
, t0
);
2181 gen_store_gpr(t0
, rt
);
2184 mem_idx
= MIPS_HFLAG_UM
;
2187 t1
= tcg_temp_new();
2189 * Do a byte access to possibly trigger a page
2190 * fault with the unaligned address.
2192 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2193 tcg_gen_andi_tl(t1
, t0
, 3);
2194 if (cpu_is_bigendian(ctx
)) {
2195 tcg_gen_xori_tl(t1
, t1
, 3);
2197 tcg_gen_shli_tl(t1
, t1
, 3);
2198 tcg_gen_andi_tl(t0
, t0
, ~3);
2199 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2200 tcg_gen_shr_tl(t0
, t0
, t1
);
2201 tcg_gen_xori_tl(t1
, t1
, 31);
2202 t2
= tcg_const_tl(0xfffffffeull
);
2203 tcg_gen_shl_tl(t2
, t2
, t1
);
2204 gen_load_gpr(t1
, rt
);
2205 tcg_gen_and_tl(t1
, t1
, t2
);
2207 tcg_gen_or_tl(t0
, t0
, t1
);
2209 tcg_gen_ext32s_tl(t0
, t0
);
2210 gen_store_gpr(t0
, rt
);
2213 mem_idx
= MIPS_HFLAG_UM
;
2217 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2218 gen_store_gpr(t0
, rt
);
2225 static void gen_st(DisasContext
*ctx
, uint32_t opc
, int rt
,
2226 int base
, int offset
)
2228 TCGv t0
= tcg_temp_new();
2229 TCGv t1
= tcg_temp_new();
2230 int mem_idx
= ctx
->mem_idx
;
2232 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2233 gen_load_gpr(t1
, rt
);
2235 #if defined(TARGET_MIPS64)
2237 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUQ
|
2238 ctx
->default_tcg_memop_mask
);
2241 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2244 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2248 mem_idx
= MIPS_HFLAG_UM
;
2251 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2252 ctx
->default_tcg_memop_mask
);
2255 mem_idx
= MIPS_HFLAG_UM
;
2258 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2259 ctx
->default_tcg_memop_mask
);
2262 mem_idx
= MIPS_HFLAG_UM
;
2265 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2268 mem_idx
= MIPS_HFLAG_UM
;
2271 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2274 mem_idx
= MIPS_HFLAG_UM
;
2277 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2285 /* Store conditional */
2286 static void gen_st_cond(DisasContext
*ctx
, int rt
, int base
, int offset
,
2287 MemOp tcg_mo
, bool eva
)
2290 TCGLabel
*l1
= gen_new_label();
2291 TCGLabel
*done
= gen_new_label();
2293 t0
= tcg_temp_new();
2294 addr
= tcg_temp_new();
2295 /* compare the address against that of the preceding LL */
2296 gen_base_offset_addr(ctx
, addr
, base
, offset
);
2297 tcg_gen_brcond_tl(TCG_COND_EQ
, addr
, cpu_lladdr
, l1
);
2298 tcg_temp_free(addr
);
2299 tcg_gen_movi_tl(t0
, 0);
2300 gen_store_gpr(t0
, rt
);
2304 /* generate cmpxchg */
2305 val
= tcg_temp_new();
2306 gen_load_gpr(val
, rt
);
2307 tcg_gen_atomic_cmpxchg_tl(t0
, cpu_lladdr
, cpu_llval
, val
,
2308 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, tcg_mo
);
2309 tcg_gen_setcond_tl(TCG_COND_EQ
, t0
, t0
, cpu_llval
);
2310 gen_store_gpr(t0
, rt
);
2313 gen_set_label(done
);
2317 /* Load and store */
2318 static void gen_flt_ldst(DisasContext
*ctx
, uint32_t opc
, int ft
,
2322 * Don't do NOP if destination is zero: we must perform the actual
2328 TCGv_i32 fp0
= tcg_temp_new_i32();
2329 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2330 ctx
->default_tcg_memop_mask
);
2331 gen_store_fpr32(ctx
, fp0
, ft
);
2332 tcg_temp_free_i32(fp0
);
2337 TCGv_i32 fp0
= tcg_temp_new_i32();
2338 gen_load_fpr32(ctx
, fp0
, ft
);
2339 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2340 ctx
->default_tcg_memop_mask
);
2341 tcg_temp_free_i32(fp0
);
2346 TCGv_i64 fp0
= tcg_temp_new_i64();
2347 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
2348 ctx
->default_tcg_memop_mask
);
2349 gen_store_fpr64(ctx
, fp0
, ft
);
2350 tcg_temp_free_i64(fp0
);
2355 TCGv_i64 fp0
= tcg_temp_new_i64();
2356 gen_load_fpr64(ctx
, fp0
, ft
);
2357 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
2358 ctx
->default_tcg_memop_mask
);
2359 tcg_temp_free_i64(fp0
);
2363 MIPS_INVAL("flt_ldst");
2364 gen_reserved_instruction(ctx
);
2369 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2370 int rs
, int16_t imm
)
2372 TCGv t0
= tcg_temp_new();
2374 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2375 check_cp1_enabled(ctx
);
2379 check_insn(ctx
, ISA_MIPS2
);
2382 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
2383 gen_flt_ldst(ctx
, op
, rt
, t0
);
2386 generate_exception_err(ctx
, EXCP_CpU
, 1);
2391 /* Arithmetic with immediate operand */
2392 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2393 int rt
, int rs
, int imm
)
2395 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2397 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2399 * If no destination, treat it as a NOP.
2400 * For addi, we must generate the overflow exception when needed.
2407 TCGv t0
= tcg_temp_local_new();
2408 TCGv t1
= tcg_temp_new();
2409 TCGv t2
= tcg_temp_new();
2410 TCGLabel
*l1
= gen_new_label();
2412 gen_load_gpr(t1
, rs
);
2413 tcg_gen_addi_tl(t0
, t1
, uimm
);
2414 tcg_gen_ext32s_tl(t0
, t0
);
2416 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2417 tcg_gen_xori_tl(t2
, t0
, uimm
);
2418 tcg_gen_and_tl(t1
, t1
, t2
);
2420 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2422 /* operands of same sign, result different sign */
2423 generate_exception(ctx
, EXCP_OVERFLOW
);
2425 tcg_gen_ext32s_tl(t0
, t0
);
2426 gen_store_gpr(t0
, rt
);
2432 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2433 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2435 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2438 #if defined(TARGET_MIPS64)
2441 TCGv t0
= tcg_temp_local_new();
2442 TCGv t1
= tcg_temp_new();
2443 TCGv t2
= tcg_temp_new();
2444 TCGLabel
*l1
= gen_new_label();
2446 gen_load_gpr(t1
, rs
);
2447 tcg_gen_addi_tl(t0
, t1
, uimm
);
2449 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2450 tcg_gen_xori_tl(t2
, t0
, uimm
);
2451 tcg_gen_and_tl(t1
, t1
, t2
);
2453 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2455 /* operands of same sign, result different sign */
2456 generate_exception(ctx
, EXCP_OVERFLOW
);
2458 gen_store_gpr(t0
, rt
);
2464 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2466 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2473 /* Logic with immediate operand */
2474 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2475 int rt
, int rs
, int16_t imm
)
2480 /* If no destination, treat it as a NOP. */
2483 uimm
= (uint16_t)imm
;
2486 if (likely(rs
!= 0)) {
2487 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2489 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2494 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2496 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2500 if (likely(rs
!= 0)) {
2501 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2503 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2507 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS_R6
)) {
2509 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2510 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2512 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2521 /* Set on less than with immediate operand */
2522 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2523 int rt
, int rs
, int16_t imm
)
2525 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2529 /* If no destination, treat it as a NOP. */
2532 t0
= tcg_temp_new();
2533 gen_load_gpr(t0
, rs
);
2536 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2539 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2545 /* Shifts with immediate operand */
2546 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2547 int rt
, int rs
, int16_t imm
)
2549 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2553 /* If no destination, treat it as a NOP. */
2557 t0
= tcg_temp_new();
2558 gen_load_gpr(t0
, rs
);
2561 tcg_gen_shli_tl(t0
, t0
, uimm
);
2562 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2565 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2569 tcg_gen_ext32u_tl(t0
, t0
);
2570 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2572 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2577 TCGv_i32 t1
= tcg_temp_new_i32();
2579 tcg_gen_trunc_tl_i32(t1
, t0
);
2580 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2581 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2582 tcg_temp_free_i32(t1
);
2584 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2587 #if defined(TARGET_MIPS64)
2589 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2592 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2595 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2599 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2601 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2605 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2608 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2611 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2614 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2622 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2623 int rd
, int rs
, int rt
)
2625 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2626 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2628 * If no destination, treat it as a NOP.
2629 * For add & sub, we must generate the overflow exception when needed.
2637 TCGv t0
= tcg_temp_local_new();
2638 TCGv t1
= tcg_temp_new();
2639 TCGv t2
= tcg_temp_new();
2640 TCGLabel
*l1
= gen_new_label();
2642 gen_load_gpr(t1
, rs
);
2643 gen_load_gpr(t2
, rt
);
2644 tcg_gen_add_tl(t0
, t1
, t2
);
2645 tcg_gen_ext32s_tl(t0
, t0
);
2646 tcg_gen_xor_tl(t1
, t1
, t2
);
2647 tcg_gen_xor_tl(t2
, t0
, t2
);
2648 tcg_gen_andc_tl(t1
, t2
, t1
);
2650 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2652 /* operands of same sign, result different sign */
2653 generate_exception(ctx
, EXCP_OVERFLOW
);
2655 gen_store_gpr(t0
, rd
);
2660 if (rs
!= 0 && rt
!= 0) {
2661 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2662 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2663 } else if (rs
== 0 && rt
!= 0) {
2664 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2665 } else if (rs
!= 0 && rt
== 0) {
2666 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2668 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2673 TCGv t0
= tcg_temp_local_new();
2674 TCGv t1
= tcg_temp_new();
2675 TCGv t2
= tcg_temp_new();
2676 TCGLabel
*l1
= gen_new_label();
2678 gen_load_gpr(t1
, rs
);
2679 gen_load_gpr(t2
, rt
);
2680 tcg_gen_sub_tl(t0
, t1
, t2
);
2681 tcg_gen_ext32s_tl(t0
, t0
);
2682 tcg_gen_xor_tl(t2
, t1
, t2
);
2683 tcg_gen_xor_tl(t1
, t0
, t1
);
2684 tcg_gen_and_tl(t1
, t1
, t2
);
2686 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2689 * operands of different sign, first operand and the result
2692 generate_exception(ctx
, EXCP_OVERFLOW
);
2694 gen_store_gpr(t0
, rd
);
2699 if (rs
!= 0 && rt
!= 0) {
2700 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2701 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2702 } else if (rs
== 0 && rt
!= 0) {
2703 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2704 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2705 } else if (rs
!= 0 && rt
== 0) {
2706 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2708 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2711 #if defined(TARGET_MIPS64)
2714 TCGv t0
= tcg_temp_local_new();
2715 TCGv t1
= tcg_temp_new();
2716 TCGv t2
= tcg_temp_new();
2717 TCGLabel
*l1
= gen_new_label();
2719 gen_load_gpr(t1
, rs
);
2720 gen_load_gpr(t2
, rt
);
2721 tcg_gen_add_tl(t0
, t1
, t2
);
2722 tcg_gen_xor_tl(t1
, t1
, t2
);
2723 tcg_gen_xor_tl(t2
, t0
, t2
);
2724 tcg_gen_andc_tl(t1
, t2
, t1
);
2726 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2728 /* operands of same sign, result different sign */
2729 generate_exception(ctx
, EXCP_OVERFLOW
);
2731 gen_store_gpr(t0
, rd
);
2736 if (rs
!= 0 && rt
!= 0) {
2737 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2738 } else if (rs
== 0 && rt
!= 0) {
2739 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2740 } else if (rs
!= 0 && rt
== 0) {
2741 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2743 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2748 TCGv t0
= tcg_temp_local_new();
2749 TCGv t1
= tcg_temp_new();
2750 TCGv t2
= tcg_temp_new();
2751 TCGLabel
*l1
= gen_new_label();
2753 gen_load_gpr(t1
, rs
);
2754 gen_load_gpr(t2
, rt
);
2755 tcg_gen_sub_tl(t0
, t1
, t2
);
2756 tcg_gen_xor_tl(t2
, t1
, t2
);
2757 tcg_gen_xor_tl(t1
, t0
, t1
);
2758 tcg_gen_and_tl(t1
, t1
, t2
);
2760 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2763 * Operands of different sign, first operand and result different
2766 generate_exception(ctx
, EXCP_OVERFLOW
);
2768 gen_store_gpr(t0
, rd
);
2773 if (rs
!= 0 && rt
!= 0) {
2774 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2775 } else if (rs
== 0 && rt
!= 0) {
2776 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2777 } else if (rs
!= 0 && rt
== 0) {
2778 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2780 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2785 if (likely(rs
!= 0 && rt
!= 0)) {
2786 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2787 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2789 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2795 /* Conditional move */
2796 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2797 int rd
, int rs
, int rt
)
2802 /* If no destination, treat it as a NOP. */
2806 t0
= tcg_temp_new();
2807 gen_load_gpr(t0
, rt
);
2808 t1
= tcg_const_tl(0);
2809 t2
= tcg_temp_new();
2810 gen_load_gpr(t2
, rs
);
2813 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2816 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2819 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2822 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2831 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2832 int rd
, int rs
, int rt
)
2835 /* If no destination, treat it as a NOP. */
2841 if (likely(rs
!= 0 && rt
!= 0)) {
2842 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2844 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2848 if (rs
!= 0 && rt
!= 0) {
2849 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2850 } else if (rs
== 0 && rt
!= 0) {
2851 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2852 } else if (rs
!= 0 && rt
== 0) {
2853 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2855 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2859 if (likely(rs
!= 0 && rt
!= 0)) {
2860 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2861 } else if (rs
== 0 && rt
!= 0) {
2862 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2863 } else if (rs
!= 0 && rt
== 0) {
2864 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2866 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2870 if (likely(rs
!= 0 && rt
!= 0)) {
2871 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2872 } else if (rs
== 0 && rt
!= 0) {
2873 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2874 } else if (rs
!= 0 && rt
== 0) {
2875 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2877 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2883 /* Set on lower than */
2884 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2885 int rd
, int rs
, int rt
)
2890 /* If no destination, treat it as a NOP. */
2894 t0
= tcg_temp_new();
2895 t1
= tcg_temp_new();
2896 gen_load_gpr(t0
, rs
);
2897 gen_load_gpr(t1
, rt
);
2900 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2903 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2911 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2912 int rd
, int rs
, int rt
)
2918 * If no destination, treat it as a NOP.
2919 * For add & sub, we must generate the overflow exception when needed.
2924 t0
= tcg_temp_new();
2925 t1
= tcg_temp_new();
2926 gen_load_gpr(t0
, rs
);
2927 gen_load_gpr(t1
, rt
);
2930 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2931 tcg_gen_shl_tl(t0
, t1
, t0
);
2932 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2935 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2936 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2939 tcg_gen_ext32u_tl(t1
, t1
);
2940 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2941 tcg_gen_shr_tl(t0
, t1
, t0
);
2942 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2946 TCGv_i32 t2
= tcg_temp_new_i32();
2947 TCGv_i32 t3
= tcg_temp_new_i32();
2949 tcg_gen_trunc_tl_i32(t2
, t0
);
2950 tcg_gen_trunc_tl_i32(t3
, t1
);
2951 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2952 tcg_gen_rotr_i32(t2
, t3
, t2
);
2953 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2954 tcg_temp_free_i32(t2
);
2955 tcg_temp_free_i32(t3
);
2958 #if defined(TARGET_MIPS64)
2960 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2961 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2964 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2965 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2968 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2969 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2972 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2973 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
2981 /* Arithmetic on HI/LO registers */
2982 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
2984 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
2995 #if defined(TARGET_MIPS64)
2997 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3001 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3005 #if defined(TARGET_MIPS64)
3007 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3011 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3016 #if defined(TARGET_MIPS64)
3018 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3022 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3025 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3030 #if defined(TARGET_MIPS64)
3032 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3036 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3039 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3045 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3048 TCGv t0
= tcg_const_tl(addr
);
3049 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3050 gen_store_gpr(t0
, reg
);
3054 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3060 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3063 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3064 addr
= addr_add(ctx
, pc
, offset
);
3065 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3069 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3070 addr
= addr_add(ctx
, pc
, offset
);
3071 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3073 #if defined(TARGET_MIPS64)
3076 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3077 addr
= addr_add(ctx
, pc
, offset
);
3078 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3082 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3085 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3086 addr
= addr_add(ctx
, pc
, offset
);
3087 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3092 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3093 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3094 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3097 #if defined(TARGET_MIPS64)
3098 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3099 case R6_OPC_LDPC
+ (1 << 16):
3100 case R6_OPC_LDPC
+ (2 << 16):
3101 case R6_OPC_LDPC
+ (3 << 16):
3103 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3104 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3105 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUQ
);
3109 MIPS_INVAL("OPC_PCREL");
3110 gen_reserved_instruction(ctx
);
3117 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3126 t0
= tcg_temp_new();
3127 t1
= tcg_temp_new();
3129 gen_load_gpr(t0
, rs
);
3130 gen_load_gpr(t1
, rt
);
3135 TCGv t2
= tcg_temp_new();
3136 TCGv t3
= tcg_temp_new();
3137 tcg_gen_ext32s_tl(t0
, t0
);
3138 tcg_gen_ext32s_tl(t1
, t1
);
3139 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3140 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3141 tcg_gen_and_tl(t2
, t2
, t3
);
3142 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3143 tcg_gen_or_tl(t2
, t2
, t3
);
3144 tcg_gen_movi_tl(t3
, 0);
3145 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3146 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3147 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3154 TCGv t2
= tcg_temp_new();
3155 TCGv t3
= tcg_temp_new();
3156 tcg_gen_ext32s_tl(t0
, t0
);
3157 tcg_gen_ext32s_tl(t1
, t1
);
3158 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3159 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3160 tcg_gen_and_tl(t2
, t2
, t3
);
3161 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3162 tcg_gen_or_tl(t2
, t2
, t3
);
3163 tcg_gen_movi_tl(t3
, 0);
3164 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3165 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3166 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3173 TCGv t2
= tcg_const_tl(0);
3174 TCGv t3
= tcg_const_tl(1);
3175 tcg_gen_ext32u_tl(t0
, t0
);
3176 tcg_gen_ext32u_tl(t1
, t1
);
3177 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3178 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3179 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3186 TCGv t2
= tcg_const_tl(0);
3187 TCGv t3
= tcg_const_tl(1);
3188 tcg_gen_ext32u_tl(t0
, t0
);
3189 tcg_gen_ext32u_tl(t1
, t1
);
3190 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3191 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3192 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3199 TCGv_i32 t2
= tcg_temp_new_i32();
3200 TCGv_i32 t3
= tcg_temp_new_i32();
3201 tcg_gen_trunc_tl_i32(t2
, t0
);
3202 tcg_gen_trunc_tl_i32(t3
, t1
);
3203 tcg_gen_mul_i32(t2
, t2
, t3
);
3204 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3205 tcg_temp_free_i32(t2
);
3206 tcg_temp_free_i32(t3
);
3211 TCGv_i32 t2
= tcg_temp_new_i32();
3212 TCGv_i32 t3
= tcg_temp_new_i32();
3213 tcg_gen_trunc_tl_i32(t2
, t0
);
3214 tcg_gen_trunc_tl_i32(t3
, t1
);
3215 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3216 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3217 tcg_temp_free_i32(t2
);
3218 tcg_temp_free_i32(t3
);
3223 TCGv_i32 t2
= tcg_temp_new_i32();
3224 TCGv_i32 t3
= tcg_temp_new_i32();
3225 tcg_gen_trunc_tl_i32(t2
, t0
);
3226 tcg_gen_trunc_tl_i32(t3
, t1
);
3227 tcg_gen_mul_i32(t2
, t2
, t3
);
3228 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3229 tcg_temp_free_i32(t2
);
3230 tcg_temp_free_i32(t3
);
3235 TCGv_i32 t2
= tcg_temp_new_i32();
3236 TCGv_i32 t3
= tcg_temp_new_i32();
3237 tcg_gen_trunc_tl_i32(t2
, t0
);
3238 tcg_gen_trunc_tl_i32(t3
, t1
);
3239 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3240 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3241 tcg_temp_free_i32(t2
);
3242 tcg_temp_free_i32(t3
);
3245 #if defined(TARGET_MIPS64)
3248 TCGv t2
= tcg_temp_new();
3249 TCGv t3
= tcg_temp_new();
3250 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3251 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3252 tcg_gen_and_tl(t2
, t2
, t3
);
3253 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3254 tcg_gen_or_tl(t2
, t2
, t3
);
3255 tcg_gen_movi_tl(t3
, 0);
3256 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3257 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3264 TCGv t2
= tcg_temp_new();
3265 TCGv t3
= tcg_temp_new();
3266 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3267 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3268 tcg_gen_and_tl(t2
, t2
, t3
);
3269 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3270 tcg_gen_or_tl(t2
, t2
, t3
);
3271 tcg_gen_movi_tl(t3
, 0);
3272 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3273 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3280 TCGv t2
= tcg_const_tl(0);
3281 TCGv t3
= tcg_const_tl(1);
3282 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3283 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3290 TCGv t2
= tcg_const_tl(0);
3291 TCGv t3
= tcg_const_tl(1);
3292 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3293 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3299 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3303 TCGv t2
= tcg_temp_new();
3304 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3309 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3313 TCGv t2
= tcg_temp_new();
3314 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3320 MIPS_INVAL("r6 mul/div");
3321 gen_reserved_instruction(ctx
);
3329 #if defined(TARGET_MIPS64)
3330 static void gen_div1_tx79(DisasContext
*ctx
, uint32_t opc
, int rs
, int rt
)
3334 t0
= tcg_temp_new();
3335 t1
= tcg_temp_new();
3337 gen_load_gpr(t0
, rs
);
3338 gen_load_gpr(t1
, rt
);
3343 TCGv t2
= tcg_temp_new();
3344 TCGv t3
= tcg_temp_new();
3345 tcg_gen_ext32s_tl(t0
, t0
);
3346 tcg_gen_ext32s_tl(t1
, t1
);
3347 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3348 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3349 tcg_gen_and_tl(t2
, t2
, t3
);
3350 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3351 tcg_gen_or_tl(t2
, t2
, t3
);
3352 tcg_gen_movi_tl(t3
, 0);
3353 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3354 tcg_gen_div_tl(cpu_LO
[1], t0
, t1
);
3355 tcg_gen_rem_tl(cpu_HI
[1], t0
, t1
);
3356 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
3357 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
3364 TCGv t2
= tcg_const_tl(0);
3365 TCGv t3
= tcg_const_tl(1);
3366 tcg_gen_ext32u_tl(t0
, t0
);
3367 tcg_gen_ext32u_tl(t1
, t1
);
3368 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3369 tcg_gen_divu_tl(cpu_LO
[1], t0
, t1
);
3370 tcg_gen_remu_tl(cpu_HI
[1], t0
, t1
);
3371 tcg_gen_ext32s_tl(cpu_LO
[1], cpu_LO
[1]);
3372 tcg_gen_ext32s_tl(cpu_HI
[1], cpu_HI
[1]);
3378 MIPS_INVAL("div1 TX79");
3379 gen_reserved_instruction(ctx
);
3388 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3389 int acc
, int rs
, int rt
)
3393 t0
= tcg_temp_new();
3394 t1
= tcg_temp_new();
3396 gen_load_gpr(t0
, rs
);
3397 gen_load_gpr(t1
, rt
);
3406 TCGv t2
= tcg_temp_new();
3407 TCGv t3
= tcg_temp_new();
3408 tcg_gen_ext32s_tl(t0
, t0
);
3409 tcg_gen_ext32s_tl(t1
, t1
);
3410 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3411 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3412 tcg_gen_and_tl(t2
, t2
, t3
);
3413 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3414 tcg_gen_or_tl(t2
, t2
, t3
);
3415 tcg_gen_movi_tl(t3
, 0);
3416 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3417 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3418 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3419 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3420 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3427 TCGv t2
= tcg_const_tl(0);
3428 TCGv t3
= tcg_const_tl(1);
3429 tcg_gen_ext32u_tl(t0
, t0
);
3430 tcg_gen_ext32u_tl(t1
, t1
);
3431 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3432 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3433 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3434 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3435 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3442 TCGv_i32 t2
= tcg_temp_new_i32();
3443 TCGv_i32 t3
= tcg_temp_new_i32();
3444 tcg_gen_trunc_tl_i32(t2
, t0
);
3445 tcg_gen_trunc_tl_i32(t3
, t1
);
3446 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3447 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3448 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3449 tcg_temp_free_i32(t2
);
3450 tcg_temp_free_i32(t3
);
3455 TCGv_i32 t2
= tcg_temp_new_i32();
3456 TCGv_i32 t3
= tcg_temp_new_i32();
3457 tcg_gen_trunc_tl_i32(t2
, t0
);
3458 tcg_gen_trunc_tl_i32(t3
, t1
);
3459 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3460 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3461 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3462 tcg_temp_free_i32(t2
);
3463 tcg_temp_free_i32(t3
);
3466 #if defined(TARGET_MIPS64)
3469 TCGv t2
= tcg_temp_new();
3470 TCGv t3
= tcg_temp_new();
3471 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3472 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3473 tcg_gen_and_tl(t2
, t2
, t3
);
3474 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3475 tcg_gen_or_tl(t2
, t2
, t3
);
3476 tcg_gen_movi_tl(t3
, 0);
3477 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3478 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3479 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3486 TCGv t2
= tcg_const_tl(0);
3487 TCGv t3
= tcg_const_tl(1);
3488 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3489 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3490 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3496 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3499 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3504 TCGv_i64 t2
= tcg_temp_new_i64();
3505 TCGv_i64 t3
= tcg_temp_new_i64();
3507 tcg_gen_ext_tl_i64(t2
, t0
);
3508 tcg_gen_ext_tl_i64(t3
, t1
);
3509 tcg_gen_mul_i64(t2
, t2
, t3
);
3510 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3511 tcg_gen_add_i64(t2
, t2
, t3
);
3512 tcg_temp_free_i64(t3
);
3513 gen_move_low32(cpu_LO
[acc
], t2
);
3514 gen_move_high32(cpu_HI
[acc
], t2
);
3515 tcg_temp_free_i64(t2
);
3520 TCGv_i64 t2
= tcg_temp_new_i64();
3521 TCGv_i64 t3
= tcg_temp_new_i64();
3523 tcg_gen_ext32u_tl(t0
, t0
);
3524 tcg_gen_ext32u_tl(t1
, t1
);
3525 tcg_gen_extu_tl_i64(t2
, t0
);
3526 tcg_gen_extu_tl_i64(t3
, t1
);
3527 tcg_gen_mul_i64(t2
, t2
, t3
);
3528 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3529 tcg_gen_add_i64(t2
, t2
, t3
);
3530 tcg_temp_free_i64(t3
);
3531 gen_move_low32(cpu_LO
[acc
], t2
);
3532 gen_move_high32(cpu_HI
[acc
], t2
);
3533 tcg_temp_free_i64(t2
);
3538 TCGv_i64 t2
= tcg_temp_new_i64();
3539 TCGv_i64 t3
= tcg_temp_new_i64();
3541 tcg_gen_ext_tl_i64(t2
, t0
);
3542 tcg_gen_ext_tl_i64(t3
, t1
);
3543 tcg_gen_mul_i64(t2
, t2
, t3
);
3544 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3545 tcg_gen_sub_i64(t2
, t3
, t2
);
3546 tcg_temp_free_i64(t3
);
3547 gen_move_low32(cpu_LO
[acc
], t2
);
3548 gen_move_high32(cpu_HI
[acc
], t2
);
3549 tcg_temp_free_i64(t2
);
3554 TCGv_i64 t2
= tcg_temp_new_i64();
3555 TCGv_i64 t3
= tcg_temp_new_i64();
3557 tcg_gen_ext32u_tl(t0
, t0
);
3558 tcg_gen_ext32u_tl(t1
, t1
);
3559 tcg_gen_extu_tl_i64(t2
, t0
);
3560 tcg_gen_extu_tl_i64(t3
, t1
);
3561 tcg_gen_mul_i64(t2
, t2
, t3
);
3562 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3563 tcg_gen_sub_i64(t2
, t3
, t2
);
3564 tcg_temp_free_i64(t3
);
3565 gen_move_low32(cpu_LO
[acc
], t2
);
3566 gen_move_high32(cpu_HI
[acc
], t2
);
3567 tcg_temp_free_i64(t2
);
3571 MIPS_INVAL("mul/div");
3572 gen_reserved_instruction(ctx
);
3581 * These MULT[U] and MADD[U] instructions implemented in for example
3582 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
3583 * architectures are special three-operand variants with the syntax
3585 * MULT[U][1] rd, rs, rt
3589 * (rd, LO, HI) <- rs * rt
3593 * MADD[U][1] rd, rs, rt
3597 * (rd, LO, HI) <- (LO, HI) + rs * rt
3599 * where the low-order 32-bits of the result is placed into both the
3600 * GPR rd and the special register LO. The high-order 32-bits of the
3601 * result is placed into the special register HI.
3603 * If the GPR rd is omitted in assembly language, it is taken to be 0,
3604 * which is the zero register that always reads as 0.
3606 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
3607 int rd
, int rs
, int rt
)
3609 TCGv t0
= tcg_temp_new();
3610 TCGv t1
= tcg_temp_new();
3613 gen_load_gpr(t0
, rs
);
3614 gen_load_gpr(t1
, rt
);
3622 TCGv_i32 t2
= tcg_temp_new_i32();
3623 TCGv_i32 t3
= tcg_temp_new_i32();
3624 tcg_gen_trunc_tl_i32(t2
, t0
);
3625 tcg_gen_trunc_tl_i32(t3
, t1
);
3626 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3628 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3630 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3631 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3632 tcg_temp_free_i32(t2
);
3633 tcg_temp_free_i32(t3
);
3636 case MMI_OPC_MULTU1
:
3641 TCGv_i32 t2
= tcg_temp_new_i32();
3642 TCGv_i32 t3
= tcg_temp_new_i32();
3643 tcg_gen_trunc_tl_i32(t2
, t0
);
3644 tcg_gen_trunc_tl_i32(t3
, t1
);
3645 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3647 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3649 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3650 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3651 tcg_temp_free_i32(t2
);
3652 tcg_temp_free_i32(t3
);
3660 TCGv_i64 t2
= tcg_temp_new_i64();
3661 TCGv_i64 t3
= tcg_temp_new_i64();
3663 tcg_gen_ext_tl_i64(t2
, t0
);
3664 tcg_gen_ext_tl_i64(t3
, t1
);
3665 tcg_gen_mul_i64(t2
, t2
, t3
);
3666 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3667 tcg_gen_add_i64(t2
, t2
, t3
);
3668 tcg_temp_free_i64(t3
);
3669 gen_move_low32(cpu_LO
[acc
], t2
);
3670 gen_move_high32(cpu_HI
[acc
], t2
);
3672 gen_move_low32(cpu_gpr
[rd
], t2
);
3674 tcg_temp_free_i64(t2
);
3677 case MMI_OPC_MADDU1
:
3682 TCGv_i64 t2
= tcg_temp_new_i64();
3683 TCGv_i64 t3
= tcg_temp_new_i64();
3685 tcg_gen_ext32u_tl(t0
, t0
);
3686 tcg_gen_ext32u_tl(t1
, t1
);
3687 tcg_gen_extu_tl_i64(t2
, t0
);
3688 tcg_gen_extu_tl_i64(t3
, t1
);
3689 tcg_gen_mul_i64(t2
, t2
, t3
);
3690 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3691 tcg_gen_add_i64(t2
, t2
, t3
);
3692 tcg_temp_free_i64(t3
);
3693 gen_move_low32(cpu_LO
[acc
], t2
);
3694 gen_move_high32(cpu_HI
[acc
], t2
);
3696 gen_move_low32(cpu_gpr
[rd
], t2
);
3698 tcg_temp_free_i64(t2
);
3702 MIPS_INVAL("mul/madd TXx9");
3703 gen_reserved_instruction(ctx
);
3712 static void gen_cl(DisasContext
*ctx
, uint32_t opc
,
3722 gen_load_gpr(t0
, rs
);
3727 #if defined(TARGET_MIPS64)
3731 tcg_gen_not_tl(t0
, t0
);
3740 tcg_gen_ext32u_tl(t0
, t0
);
3741 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3742 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3744 #if defined(TARGET_MIPS64)
3749 tcg_gen_clzi_i64(t0
, t0
, 64);
3755 /* Godson integer instructions */
3756 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3757 int rd
, int rs
, int rt
)
3769 case OPC_MULTU_G_2E
:
3770 case OPC_MULTU_G_2F
:
3771 #if defined(TARGET_MIPS64)
3772 case OPC_DMULT_G_2E
:
3773 case OPC_DMULT_G_2F
:
3774 case OPC_DMULTU_G_2E
:
3775 case OPC_DMULTU_G_2F
:
3777 t0
= tcg_temp_new();
3778 t1
= tcg_temp_new();
3781 t0
= tcg_temp_local_new();
3782 t1
= tcg_temp_local_new();
3786 gen_load_gpr(t0
, rs
);
3787 gen_load_gpr(t1
, rt
);
3792 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3793 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3795 case OPC_MULTU_G_2E
:
3796 case OPC_MULTU_G_2F
:
3797 tcg_gen_ext32u_tl(t0
, t0
);
3798 tcg_gen_ext32u_tl(t1
, t1
);
3799 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3800 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3805 TCGLabel
*l1
= gen_new_label();
3806 TCGLabel
*l2
= gen_new_label();
3807 TCGLabel
*l3
= gen_new_label();
3808 tcg_gen_ext32s_tl(t0
, t0
);
3809 tcg_gen_ext32s_tl(t1
, t1
);
3810 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3811 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3814 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3815 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3816 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3819 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3820 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3827 TCGLabel
*l1
= gen_new_label();
3828 TCGLabel
*l2
= gen_new_label();
3829 tcg_gen_ext32u_tl(t0
, t0
);
3830 tcg_gen_ext32u_tl(t1
, t1
);
3831 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3832 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3835 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3836 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3843 TCGLabel
*l1
= gen_new_label();
3844 TCGLabel
*l2
= gen_new_label();
3845 TCGLabel
*l3
= gen_new_label();
3846 tcg_gen_ext32u_tl(t0
, t0
);
3847 tcg_gen_ext32u_tl(t1
, t1
);
3848 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3849 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3850 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3852 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3855 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3856 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3863 TCGLabel
*l1
= gen_new_label();
3864 TCGLabel
*l2
= gen_new_label();
3865 tcg_gen_ext32u_tl(t0
, t0
);
3866 tcg_gen_ext32u_tl(t1
, t1
);
3867 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3868 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3871 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3872 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3876 #if defined(TARGET_MIPS64)
3877 case OPC_DMULT_G_2E
:
3878 case OPC_DMULT_G_2F
:
3879 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3881 case OPC_DMULTU_G_2E
:
3882 case OPC_DMULTU_G_2F
:
3883 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3888 TCGLabel
*l1
= gen_new_label();
3889 TCGLabel
*l2
= gen_new_label();
3890 TCGLabel
*l3
= gen_new_label();
3891 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3892 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3895 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3896 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3897 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3900 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3904 case OPC_DDIVU_G_2E
:
3905 case OPC_DDIVU_G_2F
:
3907 TCGLabel
*l1
= gen_new_label();
3908 TCGLabel
*l2
= gen_new_label();
3909 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3910 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3913 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3920 TCGLabel
*l1
= gen_new_label();
3921 TCGLabel
*l2
= gen_new_label();
3922 TCGLabel
*l3
= gen_new_label();
3923 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3924 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3925 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3927 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3930 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3934 case OPC_DMODU_G_2E
:
3935 case OPC_DMODU_G_2F
:
3937 TCGLabel
*l1
= gen_new_label();
3938 TCGLabel
*l2
= gen_new_label();
3939 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3940 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3943 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3954 /* Loongson multimedia instructions */
3955 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3957 uint32_t opc
, shift_max
;
3961 opc
= MASK_LMMI(ctx
->opcode
);
3967 t0
= tcg_temp_local_new_i64();
3968 t1
= tcg_temp_local_new_i64();
3971 t0
= tcg_temp_new_i64();
3972 t1
= tcg_temp_new_i64();
3976 check_cp1_enabled(ctx
);
3977 gen_load_fpr64(ctx
, t0
, rs
);
3978 gen_load_fpr64(ctx
, t1
, rt
);
3982 gen_helper_paddsh(t0
, t0
, t1
);
3985 gen_helper_paddush(t0
, t0
, t1
);
3988 gen_helper_paddh(t0
, t0
, t1
);
3991 gen_helper_paddw(t0
, t0
, t1
);
3994 gen_helper_paddsb(t0
, t0
, t1
);
3997 gen_helper_paddusb(t0
, t0
, t1
);
4000 gen_helper_paddb(t0
, t0
, t1
);
4004 gen_helper_psubsh(t0
, t0
, t1
);
4007 gen_helper_psubush(t0
, t0
, t1
);
4010 gen_helper_psubh(t0
, t0
, t1
);
4013 gen_helper_psubw(t0
, t0
, t1
);
4016 gen_helper_psubsb(t0
, t0
, t1
);
4019 gen_helper_psubusb(t0
, t0
, t1
);
4022 gen_helper_psubb(t0
, t0
, t1
);
4026 gen_helper_pshufh(t0
, t0
, t1
);
4029 gen_helper_packsswh(t0
, t0
, t1
);
4032 gen_helper_packsshb(t0
, t0
, t1
);
4035 gen_helper_packushb(t0
, t0
, t1
);
4039 gen_helper_punpcklhw(t0
, t0
, t1
);
4042 gen_helper_punpckhhw(t0
, t0
, t1
);
4045 gen_helper_punpcklbh(t0
, t0
, t1
);
4048 gen_helper_punpckhbh(t0
, t0
, t1
);
4051 gen_helper_punpcklwd(t0
, t0
, t1
);
4054 gen_helper_punpckhwd(t0
, t0
, t1
);
4058 gen_helper_pavgh(t0
, t0
, t1
);
4061 gen_helper_pavgb(t0
, t0
, t1
);
4064 gen_helper_pmaxsh(t0
, t0
, t1
);
4067 gen_helper_pminsh(t0
, t0
, t1
);
4070 gen_helper_pmaxub(t0
, t0
, t1
);
4073 gen_helper_pminub(t0
, t0
, t1
);
4077 gen_helper_pcmpeqw(t0
, t0
, t1
);
4080 gen_helper_pcmpgtw(t0
, t0
, t1
);
4083 gen_helper_pcmpeqh(t0
, t0
, t1
);
4086 gen_helper_pcmpgth(t0
, t0
, t1
);
4089 gen_helper_pcmpeqb(t0
, t0
, t1
);
4092 gen_helper_pcmpgtb(t0
, t0
, t1
);
4096 gen_helper_psllw(t0
, t0
, t1
);
4099 gen_helper_psllh(t0
, t0
, t1
);
4102 gen_helper_psrlw(t0
, t0
, t1
);
4105 gen_helper_psrlh(t0
, t0
, t1
);
4108 gen_helper_psraw(t0
, t0
, t1
);
4111 gen_helper_psrah(t0
, t0
, t1
);
4115 gen_helper_pmullh(t0
, t0
, t1
);
4118 gen_helper_pmulhh(t0
, t0
, t1
);
4121 gen_helper_pmulhuh(t0
, t0
, t1
);
4124 gen_helper_pmaddhw(t0
, t0
, t1
);
4128 gen_helper_pasubub(t0
, t0
, t1
);
4131 gen_helper_biadd(t0
, t0
);
4134 gen_helper_pmovmskb(t0
, t0
);
4138 tcg_gen_add_i64(t0
, t0
, t1
);
4141 tcg_gen_sub_i64(t0
, t0
, t1
);
4144 tcg_gen_xor_i64(t0
, t0
, t1
);
4147 tcg_gen_nor_i64(t0
, t0
, t1
);
4150 tcg_gen_and_i64(t0
, t0
, t1
);
4153 tcg_gen_or_i64(t0
, t0
, t1
);
4157 tcg_gen_andc_i64(t0
, t1
, t0
);
4161 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
4164 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
4167 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
4170 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
4174 tcg_gen_andi_i64(t1
, t1
, 3);
4175 tcg_gen_shli_i64(t1
, t1
, 4);
4176 tcg_gen_shr_i64(t0
, t0
, t1
);
4177 tcg_gen_ext16u_i64(t0
, t0
);
4181 tcg_gen_add_i64(t0
, t0
, t1
);
4182 tcg_gen_ext32s_i64(t0
, t0
);
4185 tcg_gen_sub_i64(t0
, t0
, t1
);
4186 tcg_gen_ext32s_i64(t0
, t0
);
4208 /* Make sure shift count isn't TCG undefined behaviour. */
4209 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4214 tcg_gen_shl_i64(t0
, t0
, t1
);
4219 * Since SRA is UndefinedResult without sign-extended inputs,
4220 * we can treat SRA and DSRA the same.
4222 tcg_gen_sar_i64(t0
, t0
, t1
);
4225 /* We want to shift in zeros for SRL; zero-extend first. */
4226 tcg_gen_ext32u_i64(t0
, t0
);
4229 tcg_gen_shr_i64(t0
, t0
, t1
);
4233 if (shift_max
== 32) {
4234 tcg_gen_ext32s_i64(t0
, t0
);
4237 /* Shifts larger than MAX produce zero. */
4238 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4239 tcg_gen_neg_i64(t1
, t1
);
4240 tcg_gen_and_i64(t0
, t0
, t1
);
4246 TCGv_i64 t2
= tcg_temp_new_i64();
4247 TCGLabel
*lab
= gen_new_label();
4249 tcg_gen_mov_i64(t2
, t0
);
4250 tcg_gen_add_i64(t0
, t1
, t2
);
4251 if (opc
== OPC_ADD_CP2
) {
4252 tcg_gen_ext32s_i64(t0
, t0
);
4254 tcg_gen_xor_i64(t1
, t1
, t2
);
4255 tcg_gen_xor_i64(t2
, t2
, t0
);
4256 tcg_gen_andc_i64(t1
, t2
, t1
);
4257 tcg_temp_free_i64(t2
);
4258 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4259 generate_exception(ctx
, EXCP_OVERFLOW
);
4267 TCGv_i64 t2
= tcg_temp_new_i64();
4268 TCGLabel
*lab
= gen_new_label();
4270 tcg_gen_mov_i64(t2
, t0
);
4271 tcg_gen_sub_i64(t0
, t1
, t2
);
4272 if (opc
== OPC_SUB_CP2
) {
4273 tcg_gen_ext32s_i64(t0
, t0
);
4275 tcg_gen_xor_i64(t1
, t1
, t2
);
4276 tcg_gen_xor_i64(t2
, t2
, t0
);
4277 tcg_gen_and_i64(t1
, t1
, t2
);
4278 tcg_temp_free_i64(t2
);
4279 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4280 generate_exception(ctx
, EXCP_OVERFLOW
);
4286 tcg_gen_ext32u_i64(t0
, t0
);
4287 tcg_gen_ext32u_i64(t1
, t1
);
4288 tcg_gen_mul_i64(t0
, t0
, t1
);
4297 cond
= TCG_COND_LTU
;
4305 cond
= TCG_COND_LEU
;
4312 int cc
= (ctx
->opcode
>> 8) & 0x7;
4313 TCGv_i64 t64
= tcg_temp_new_i64();
4314 TCGv_i32 t32
= tcg_temp_new_i32();
4316 tcg_gen_setcond_i64(cond
, t64
, t0
, t1
);
4317 tcg_gen_extrl_i64_i32(t32
, t64
);
4318 tcg_gen_deposit_i32(fpu_fcr31
, fpu_fcr31
, t32
,
4321 tcg_temp_free_i32(t32
);
4322 tcg_temp_free_i64(t64
);
4327 MIPS_INVAL("loongson_cp2");
4328 gen_reserved_instruction(ctx
);
4332 gen_store_fpr64(ctx
, t0
, rd
);
4335 tcg_temp_free_i64(t0
);
4336 tcg_temp_free_i64(t1
);
4339 static void gen_loongson_lswc2(DisasContext
*ctx
, int rt
,
4344 #if defined(TARGET_MIPS64)
4345 int lsq_rt1
= ctx
->opcode
& 0x1f;
4346 int lsq_offset
= sextract32(ctx
->opcode
, 6, 9) << 4;
4348 int shf_offset
= sextract32(ctx
->opcode
, 6, 8);
4350 t0
= tcg_temp_new();
4352 switch (MASK_LOONGSON_GSLSQ(ctx
->opcode
)) {
4353 #if defined(TARGET_MIPS64)
4355 t1
= tcg_temp_new();
4356 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4357 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4358 ctx
->default_tcg_memop_mask
);
4359 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4360 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4361 ctx
->default_tcg_memop_mask
);
4362 gen_store_gpr(t1
, rt
);
4363 gen_store_gpr(t0
, lsq_rt1
);
4367 check_cp1_enabled(ctx
);
4368 t1
= tcg_temp_new();
4369 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4370 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4371 ctx
->default_tcg_memop_mask
);
4372 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4373 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4374 ctx
->default_tcg_memop_mask
);
4375 gen_store_fpr64(ctx
, t1
, rt
);
4376 gen_store_fpr64(ctx
, t0
, lsq_rt1
);
4380 t1
= tcg_temp_new();
4381 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4382 gen_load_gpr(t1
, rt
);
4383 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4384 ctx
->default_tcg_memop_mask
);
4385 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4386 gen_load_gpr(t1
, lsq_rt1
);
4387 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4388 ctx
->default_tcg_memop_mask
);
4392 check_cp1_enabled(ctx
);
4393 t1
= tcg_temp_new();
4394 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4395 gen_load_fpr64(ctx
, t1
, rt
);
4396 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4397 ctx
->default_tcg_memop_mask
);
4398 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4399 gen_load_fpr64(ctx
, t1
, lsq_rt1
);
4400 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4401 ctx
->default_tcg_memop_mask
);
4406 switch (MASK_LOONGSON_GSSHFLS(ctx
->opcode
)) {
4408 check_cp1_enabled(ctx
);
4409 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4410 t1
= tcg_temp_new();
4411 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4412 tcg_gen_andi_tl(t1
, t0
, 3);
4413 if (!cpu_is_bigendian(ctx
)) {
4414 tcg_gen_xori_tl(t1
, t1
, 3);
4416 tcg_gen_shli_tl(t1
, t1
, 3);
4417 tcg_gen_andi_tl(t0
, t0
, ~3);
4418 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
4419 tcg_gen_shl_tl(t0
, t0
, t1
);
4420 t2
= tcg_const_tl(-1);
4421 tcg_gen_shl_tl(t2
, t2
, t1
);
4422 fp0
= tcg_temp_new_i32();
4423 gen_load_fpr32(ctx
, fp0
, rt
);
4424 tcg_gen_ext_i32_tl(t1
, fp0
);
4425 tcg_gen_andc_tl(t1
, t1
, t2
);
4427 tcg_gen_or_tl(t0
, t0
, t1
);
4429 #if defined(TARGET_MIPS64)
4430 tcg_gen_extrl_i64_i32(fp0
, t0
);
4432 tcg_gen_ext32s_tl(fp0
, t0
);
4434 gen_store_fpr32(ctx
, fp0
, rt
);
4435 tcg_temp_free_i32(fp0
);
4438 check_cp1_enabled(ctx
);
4439 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4440 t1
= tcg_temp_new();
4441 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4442 tcg_gen_andi_tl(t1
, t0
, 3);
4443 if (cpu_is_bigendian(ctx
)) {
4444 tcg_gen_xori_tl(t1
, t1
, 3);
4446 tcg_gen_shli_tl(t1
, t1
, 3);
4447 tcg_gen_andi_tl(t0
, t0
, ~3);
4448 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
4449 tcg_gen_shr_tl(t0
, t0
, t1
);
4450 tcg_gen_xori_tl(t1
, t1
, 31);
4451 t2
= tcg_const_tl(0xfffffffeull
);
4452 tcg_gen_shl_tl(t2
, t2
, t1
);
4453 fp0
= tcg_temp_new_i32();
4454 gen_load_fpr32(ctx
, fp0
, rt
);
4455 tcg_gen_ext_i32_tl(t1
, fp0
);
4456 tcg_gen_and_tl(t1
, t1
, t2
);
4458 tcg_gen_or_tl(t0
, t0
, t1
);
4460 #if defined(TARGET_MIPS64)
4461 tcg_gen_extrl_i64_i32(fp0
, t0
);
4463 tcg_gen_ext32s_tl(fp0
, t0
);
4465 gen_store_fpr32(ctx
, fp0
, rt
);
4466 tcg_temp_free_i32(fp0
);
4468 #if defined(TARGET_MIPS64)
4470 check_cp1_enabled(ctx
);
4471 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4472 t1
= tcg_temp_new();
4473 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4474 tcg_gen_andi_tl(t1
, t0
, 7);
4475 if (!cpu_is_bigendian(ctx
)) {
4476 tcg_gen_xori_tl(t1
, t1
, 7);
4478 tcg_gen_shli_tl(t1
, t1
, 3);
4479 tcg_gen_andi_tl(t0
, t0
, ~7);
4480 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
4481 tcg_gen_shl_tl(t0
, t0
, t1
);
4482 t2
= tcg_const_tl(-1);
4483 tcg_gen_shl_tl(t2
, t2
, t1
);
4484 gen_load_fpr64(ctx
, t1
, rt
);
4485 tcg_gen_andc_tl(t1
, t1
, t2
);
4487 tcg_gen_or_tl(t0
, t0
, t1
);
4489 gen_store_fpr64(ctx
, t0
, rt
);
4492 check_cp1_enabled(ctx
);
4493 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4494 t1
= tcg_temp_new();
4495 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4496 tcg_gen_andi_tl(t1
, t0
, 7);
4497 if (cpu_is_bigendian(ctx
)) {
4498 tcg_gen_xori_tl(t1
, t1
, 7);
4500 tcg_gen_shli_tl(t1
, t1
, 3);
4501 tcg_gen_andi_tl(t0
, t0
, ~7);
4502 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
4503 tcg_gen_shr_tl(t0
, t0
, t1
);
4504 tcg_gen_xori_tl(t1
, t1
, 63);
4505 t2
= tcg_const_tl(0xfffffffffffffffeull
);
4506 tcg_gen_shl_tl(t2
, t2
, t1
);
4507 gen_load_fpr64(ctx
, t1
, rt
);
4508 tcg_gen_and_tl(t1
, t1
, t2
);
4510 tcg_gen_or_tl(t0
, t0
, t1
);
4512 gen_store_fpr64(ctx
, t0
, rt
);
4516 MIPS_INVAL("loongson_gsshfl");
4517 gen_reserved_instruction(ctx
);
4522 switch (MASK_LOONGSON_GSSHFLS(ctx
->opcode
)) {
4524 check_cp1_enabled(ctx
);
4525 t1
= tcg_temp_new();
4526 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4527 fp0
= tcg_temp_new_i32();
4528 gen_load_fpr32(ctx
, fp0
, rt
);
4529 tcg_gen_ext_i32_tl(t1
, fp0
);
4530 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
4531 tcg_temp_free_i32(fp0
);
4535 check_cp1_enabled(ctx
);
4536 t1
= tcg_temp_new();
4537 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4538 fp0
= tcg_temp_new_i32();
4539 gen_load_fpr32(ctx
, fp0
, rt
);
4540 tcg_gen_ext_i32_tl(t1
, fp0
);
4541 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
4542 tcg_temp_free_i32(fp0
);
4545 #if defined(TARGET_MIPS64)
4547 check_cp1_enabled(ctx
);
4548 t1
= tcg_temp_new();
4549 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4550 gen_load_fpr64(ctx
, t1
, rt
);
4551 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
4555 check_cp1_enabled(ctx
);
4556 t1
= tcg_temp_new();
4557 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4558 gen_load_fpr64(ctx
, t1
, rt
);
4559 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
4564 MIPS_INVAL("loongson_gsshfs");
4565 gen_reserved_instruction(ctx
);
4570 MIPS_INVAL("loongson_gslsq");
4571 gen_reserved_instruction(ctx
);
4577 /* Loongson EXT LDC2/SDC2 */
4578 static void gen_loongson_lsdc2(DisasContext
*ctx
, int rt
,
4581 int offset
= sextract32(ctx
->opcode
, 3, 8);
4582 uint32_t opc
= MASK_LOONGSON_LSDC2(ctx
->opcode
);
4586 /* Pre-conditions */
4592 /* prefetch, implement as NOP */
4603 #if defined(TARGET_MIPS64)
4606 check_cp1_enabled(ctx
);
4607 /* prefetch, implement as NOP */
4613 #if defined(TARGET_MIPS64)
4616 check_cp1_enabled(ctx
);
4619 MIPS_INVAL("loongson_lsdc2");
4620 gen_reserved_instruction(ctx
);
4625 t0
= tcg_temp_new();
4627 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4628 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4632 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
4633 gen_store_gpr(t0
, rt
);
4636 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
4637 ctx
->default_tcg_memop_mask
);
4638 gen_store_gpr(t0
, rt
);
4641 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4643 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4645 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
4646 ctx
->default_tcg_memop_mask
);
4647 gen_store_gpr(t0
, rt
);
4649 #if defined(TARGET_MIPS64)
4651 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4653 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4655 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4656 ctx
->default_tcg_memop_mask
);
4657 gen_store_gpr(t0
, rt
);
4661 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4663 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4665 fp0
= tcg_temp_new_i32();
4666 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
4667 ctx
->default_tcg_memop_mask
);
4668 gen_store_fpr32(ctx
, fp0
, rt
);
4669 tcg_temp_free_i32(fp0
);
4671 #if defined(TARGET_MIPS64)
4673 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4675 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4677 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4678 ctx
->default_tcg_memop_mask
);
4679 gen_store_fpr64(ctx
, t0
, rt
);
4683 t1
= tcg_temp_new();
4684 gen_load_gpr(t1
, rt
);
4685 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
4689 t1
= tcg_temp_new();
4690 gen_load_gpr(t1
, rt
);
4691 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
4692 ctx
->default_tcg_memop_mask
);
4696 t1
= tcg_temp_new();
4697 gen_load_gpr(t1
, rt
);
4698 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
4699 ctx
->default_tcg_memop_mask
);
4702 #if defined(TARGET_MIPS64)
4704 t1
= tcg_temp_new();
4705 gen_load_gpr(t1
, rt
);
4706 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4707 ctx
->default_tcg_memop_mask
);
4712 fp0
= tcg_temp_new_i32();
4713 gen_load_fpr32(ctx
, fp0
, rt
);
4714 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
4715 ctx
->default_tcg_memop_mask
);
4716 tcg_temp_free_i32(fp0
);
4718 #if defined(TARGET_MIPS64)
4720 t1
= tcg_temp_new();
4721 gen_load_fpr64(ctx
, t1
, rt
);
4722 tcg_gen_qemu_st_i64(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4723 ctx
->default_tcg_memop_mask
);
4735 static void gen_trap(DisasContext
*ctx
, uint32_t opc
,
4736 int rs
, int rt
, int16_t imm
, int code
)
4739 TCGv t0
= tcg_temp_new();
4740 TCGv t1
= tcg_temp_new();
4743 /* Load needed operands */
4751 /* Compare two registers */
4753 gen_load_gpr(t0
, rs
);
4754 gen_load_gpr(t1
, rt
);
4764 /* Compare register to immediate */
4765 if (rs
!= 0 || imm
!= 0) {
4766 gen_load_gpr(t0
, rs
);
4767 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4774 case OPC_TEQ
: /* rs == rs */
4775 case OPC_TEQI
: /* r0 == 0 */
4776 case OPC_TGE
: /* rs >= rs */
4777 case OPC_TGEI
: /* r0 >= 0 */
4778 case OPC_TGEU
: /* rs >= rs unsigned */
4779 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4781 #ifdef CONFIG_USER_ONLY
4782 /* Pass the break code along to cpu_loop. */
4783 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
4784 offsetof(CPUMIPSState
, error_code
));
4786 generate_exception_end(ctx
, EXCP_TRAP
);
4788 case OPC_TLT
: /* rs < rs */
4789 case OPC_TLTI
: /* r0 < 0 */
4790 case OPC_TLTU
: /* rs < rs unsigned */
4791 case OPC_TLTIU
: /* r0 < 0 unsigned */
4792 case OPC_TNE
: /* rs != rs */
4793 case OPC_TNEI
: /* r0 != 0 */
4794 /* Never trap: treat as NOP. */
4798 TCGLabel
*l1
= gen_new_label();
4803 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4807 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4811 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4815 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4819 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4823 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4826 #ifdef CONFIG_USER_ONLY
4827 /* Pass the break code along to cpu_loop. */
4828 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
4829 offsetof(CPUMIPSState
, error_code
));
4831 /* Like save_cpu_state, only don't update saved values. */
4832 if (ctx
->base
.pc_next
!= ctx
->saved_pc
) {
4833 gen_save_pc(ctx
->base
.pc_next
);
4835 if (ctx
->hflags
!= ctx
->saved_hflags
) {
4836 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
4838 generate_exception(ctx
, EXCP_TRAP
);
4845 static void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4847 if (translator_use_goto_tb(&ctx
->base
, dest
)) {
4850 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
4853 tcg_gen_lookup_and_goto_ptr();
4857 /* Branches (before delay slot) */
4858 static void gen_compute_branch(DisasContext
*ctx
, uint32_t opc
,
4860 int rs
, int rt
, int32_t offset
,
4863 target_ulong btgt
= -1;
4865 int bcond_compute
= 0;
4866 TCGv t0
= tcg_temp_new();
4867 TCGv t1
= tcg_temp_new();
4869 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4870 #ifdef MIPS_DEBUG_DISAS
4871 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4872 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
4874 gen_reserved_instruction(ctx
);
4878 /* Load needed operands */
4884 /* Compare two registers */
4886 gen_load_gpr(t0
, rs
);
4887 gen_load_gpr(t1
, rt
);
4890 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4904 /* Compare to zero */
4906 gen_load_gpr(t0
, rs
);
4909 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4912 #if defined(TARGET_MIPS64)
4914 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4916 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4919 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4924 /* Jump to immediate */
4925 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
4930 /* Jump to register */
4931 if (offset
!= 0 && offset
!= 16) {
4933 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4934 * others are reserved.
4936 MIPS_INVAL("jump hint");
4937 gen_reserved_instruction(ctx
);
4940 gen_load_gpr(btarget
, rs
);
4943 MIPS_INVAL("branch/jump");
4944 gen_reserved_instruction(ctx
);
4947 if (bcond_compute
== 0) {
4948 /* No condition to be computed */
4950 case OPC_BEQ
: /* rx == rx */
4951 case OPC_BEQL
: /* rx == rx likely */
4952 case OPC_BGEZ
: /* 0 >= 0 */
4953 case OPC_BGEZL
: /* 0 >= 0 likely */
4954 case OPC_BLEZ
: /* 0 <= 0 */
4955 case OPC_BLEZL
: /* 0 <= 0 likely */
4957 ctx
->hflags
|= MIPS_HFLAG_B
;
4959 case OPC_BGEZAL
: /* 0 >= 0 */
4960 case OPC_BGEZALL
: /* 0 >= 0 likely */
4961 /* Always take and link */
4963 ctx
->hflags
|= MIPS_HFLAG_B
;
4965 case OPC_BNE
: /* rx != rx */
4966 case OPC_BGTZ
: /* 0 > 0 */
4967 case OPC_BLTZ
: /* 0 < 0 */
4970 case OPC_BLTZAL
: /* 0 < 0 */
4972 * Handle as an unconditional branch to get correct delay
4976 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
4977 ctx
->hflags
|= MIPS_HFLAG_B
;
4979 case OPC_BLTZALL
: /* 0 < 0 likely */
4980 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
4981 /* Skip the instruction in the delay slot */
4982 ctx
->base
.pc_next
+= 4;
4984 case OPC_BNEL
: /* rx != rx likely */
4985 case OPC_BGTZL
: /* 0 > 0 likely */
4986 case OPC_BLTZL
: /* 0 < 0 likely */
4987 /* Skip the instruction in the delay slot */
4988 ctx
->base
.pc_next
+= 4;
4991 ctx
->hflags
|= MIPS_HFLAG_B
;
4994 ctx
->hflags
|= MIPS_HFLAG_BX
;
4998 ctx
->hflags
|= MIPS_HFLAG_B
;
5001 ctx
->hflags
|= MIPS_HFLAG_BR
;
5005 ctx
->hflags
|= MIPS_HFLAG_BR
;
5008 MIPS_INVAL("branch/jump");
5009 gen_reserved_instruction(ctx
);
5015 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5018 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5021 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5024 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5027 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5030 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5033 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5037 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5041 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5044 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5047 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5050 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5053 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5056 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5059 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5061 #if defined(TARGET_MIPS64)
5063 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5067 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5070 ctx
->hflags
|= MIPS_HFLAG_BC
;
5073 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5076 ctx
->hflags
|= MIPS_HFLAG_BL
;
5079 MIPS_INVAL("conditional branch/jump");
5080 gen_reserved_instruction(ctx
);
5085 ctx
->btarget
= btgt
;
5087 switch (delayslot_size
) {
5089 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5092 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5097 int post_delay
= insn_bytes
+ delayslot_size
;
5098 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5100 tcg_gen_movi_tl(cpu_gpr
[blink
],
5101 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5105 if (insn_bytes
== 2) {
5106 ctx
->hflags
|= MIPS_HFLAG_B16
;
5113 /* special3 bitfield operations */
5114 static void gen_bitops(DisasContext
*ctx
, uint32_t opc
, int rt
,
5115 int rs
, int lsb
, int msb
)
5117 TCGv t0
= tcg_temp_new();
5118 TCGv t1
= tcg_temp_new();
5120 gen_load_gpr(t1
, rs
);
5123 if (lsb
+ msb
> 31) {
5127 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5130 * The two checks together imply that lsb == 0,
5131 * so this is a simple sign-extension.
5133 tcg_gen_ext32s_tl(t0
, t1
);
5136 #if defined(TARGET_MIPS64)
5145 if (lsb
+ msb
> 63) {
5148 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5155 gen_load_gpr(t0
, rt
);
5156 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5157 tcg_gen_ext32s_tl(t0
, t0
);
5159 #if defined(TARGET_MIPS64)
5170 gen_load_gpr(t0
, rt
);
5171 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5176 MIPS_INVAL("bitops");
5177 gen_reserved_instruction(ctx
);
5182 gen_store_gpr(t0
, rt
);
5187 static void gen_bshfl(DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
5192 /* If no destination, treat it as a NOP. */
5196 t0
= tcg_temp_new();
5197 gen_load_gpr(t0
, rt
);
5201 TCGv t1
= tcg_temp_new();
5202 TCGv t2
= tcg_const_tl(0x00FF00FF);
5204 tcg_gen_shri_tl(t1
, t0
, 8);
5205 tcg_gen_and_tl(t1
, t1
, t2
);
5206 tcg_gen_and_tl(t0
, t0
, t2
);
5207 tcg_gen_shli_tl(t0
, t0
, 8);
5208 tcg_gen_or_tl(t0
, t0
, t1
);
5211 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
5215 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
5218 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
5220 #if defined(TARGET_MIPS64)
5223 TCGv t1
= tcg_temp_new();
5224 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
5226 tcg_gen_shri_tl(t1
, t0
, 8);
5227 tcg_gen_and_tl(t1
, t1
, t2
);
5228 tcg_gen_and_tl(t0
, t0
, t2
);
5229 tcg_gen_shli_tl(t0
, t0
, 8);
5230 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
5237 TCGv t1
= tcg_temp_new();
5238 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
5240 tcg_gen_shri_tl(t1
, t0
, 16);
5241 tcg_gen_and_tl(t1
, t1
, t2
);
5242 tcg_gen_and_tl(t0
, t0
, t2
);
5243 tcg_gen_shli_tl(t0
, t0
, 16);
5244 tcg_gen_or_tl(t0
, t0
, t1
);
5245 tcg_gen_shri_tl(t1
, t0
, 32);
5246 tcg_gen_shli_tl(t0
, t0
, 32);
5247 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
5254 MIPS_INVAL("bsfhl");
5255 gen_reserved_instruction(ctx
);
5262 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
5270 t0
= tcg_temp_new();
5271 if (bits
== 0 || bits
== wordsz
) {
5273 gen_load_gpr(t0
, rt
);
5275 gen_load_gpr(t0
, rs
);
5279 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
5281 #if defined(TARGET_MIPS64)
5283 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
5288 TCGv t1
= tcg_temp_new();
5289 gen_load_gpr(t0
, rt
);
5290 gen_load_gpr(t1
, rs
);
5294 TCGv_i64 t2
= tcg_temp_new_i64();
5295 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
5296 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
5297 gen_move_low32(cpu_gpr
[rd
], t2
);
5298 tcg_temp_free_i64(t2
);
5301 #if defined(TARGET_MIPS64)
5303 tcg_gen_shli_tl(t0
, t0
, bits
);
5304 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
5305 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
5315 void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
, int bp
)
5317 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
5320 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
5327 t0
= tcg_temp_new();
5328 gen_load_gpr(t0
, rt
);
5331 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
5333 #if defined(TARGET_MIPS64)
5335 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
5342 #ifndef CONFIG_USER_ONLY
5343 /* CP0 (MMU and control) */
5344 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
5346 TCGv_i64 t0
= tcg_temp_new_i64();
5347 TCGv_i64 t1
= tcg_temp_new_i64();
5349 tcg_gen_ext_tl_i64(t0
, arg
);
5350 tcg_gen_ld_i64(t1
, cpu_env
, off
);
5351 #if defined(TARGET_MIPS64)
5352 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
5354 tcg_gen_concat32_i64(t1
, t1
, t0
);
5356 tcg_gen_st_i64(t1
, cpu_env
, off
);
5357 tcg_temp_free_i64(t1
);
5358 tcg_temp_free_i64(t0
);
5361 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
5363 TCGv_i64 t0
= tcg_temp_new_i64();
5364 TCGv_i64 t1
= tcg_temp_new_i64();
5366 tcg_gen_ext_tl_i64(t0
, arg
);
5367 tcg_gen_ld_i64(t1
, cpu_env
, off
);
5368 tcg_gen_concat32_i64(t1
, t1
, t0
);
5369 tcg_gen_st_i64(t1
, cpu_env
, off
);
5370 tcg_temp_free_i64(t1
);
5371 tcg_temp_free_i64(t0
);
5374 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
5376 TCGv_i64 t0
= tcg_temp_new_i64();
5378 tcg_gen_ld_i64(t0
, cpu_env
, off
);
5379 #if defined(TARGET_MIPS64)
5380 tcg_gen_shri_i64(t0
, t0
, 30);
5382 tcg_gen_shri_i64(t0
, t0
, 32);
5384 gen_move_low32(arg
, t0
);
5385 tcg_temp_free_i64(t0
);
5388 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
5390 TCGv_i64 t0
= tcg_temp_new_i64();
5392 tcg_gen_ld_i64(t0
, cpu_env
, off
);
5393 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
5394 gen_move_low32(arg
, t0
);
5395 tcg_temp_free_i64(t0
);
5398 static inline void gen_mfc0_load32(TCGv arg
, target_ulong off
)
5400 TCGv_i32 t0
= tcg_temp_new_i32();
5402 tcg_gen_ld_i32(t0
, cpu_env
, off
);
5403 tcg_gen_ext_i32_tl(arg
, t0
);
5404 tcg_temp_free_i32(t0
);
5407 static inline void gen_mfc0_load64(TCGv arg
, target_ulong off
)
5409 tcg_gen_ld_tl(arg
, cpu_env
, off
);
5410 tcg_gen_ext32s_tl(arg
, arg
);
5413 static inline void gen_mtc0_store32(TCGv arg
, target_ulong off
)
5415 TCGv_i32 t0
= tcg_temp_new_i32();
5417 tcg_gen_trunc_tl_i32(t0
, arg
);
5418 tcg_gen_st_i32(t0
, cpu_env
, off
);
5419 tcg_temp_free_i32(t0
);
/*
 * Bail out to the enclosing function's cp0_unimplemented label when a
 * required CPU feature/flag 'c' is absent. Only usable inside the
 * gen_m*c0 functions, which all define that label.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
5429 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5431 const char *register_name
= "invalid";
5434 case CP0_REGISTER_02
:
5437 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5438 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
5439 register_name
= "EntryLo0";
5442 goto cp0_unimplemented
;
5445 case CP0_REGISTER_03
:
5447 case CP0_REG03__ENTRYLO1
:
5448 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5449 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
5450 register_name
= "EntryLo1";
5453 goto cp0_unimplemented
;
5456 case CP0_REGISTER_09
:
5458 case CP0_REG09__SAAR
:
5459 CP0_CHECK(ctx
->saar
);
5460 gen_helper_mfhc0_saar(arg
, cpu_env
);
5461 register_name
= "SAAR";
5464 goto cp0_unimplemented
;
5467 case CP0_REGISTER_17
:
5469 case CP0_REG17__LLADDR
:
5470 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_LLAddr
),
5471 ctx
->CP0_LLAddr_shift
);
5472 register_name
= "LLAddr";
5474 case CP0_REG17__MAAR
:
5475 CP0_CHECK(ctx
->mrp
);
5476 gen_helper_mfhc0_maar(arg
, cpu_env
);
5477 register_name
= "MAAR";
5480 goto cp0_unimplemented
;
5483 case CP0_REGISTER_19
:
5485 case CP0_REG19__WATCHHI0
:
5486 case CP0_REG19__WATCHHI1
:
5487 case CP0_REG19__WATCHHI2
:
5488 case CP0_REG19__WATCHHI3
:
5489 case CP0_REG19__WATCHHI4
:
5490 case CP0_REG19__WATCHHI5
:
5491 case CP0_REG19__WATCHHI6
:
5492 case CP0_REG19__WATCHHI7
:
5493 /* upper 32 bits are only available when Config5MI != 0 */
5495 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_WatchHi
[sel
]), 0);
5496 register_name
= "WatchHi";
5499 goto cp0_unimplemented
;
5502 case CP0_REGISTER_28
:
5508 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
5509 register_name
= "TagLo";
5512 goto cp0_unimplemented
;
5516 goto cp0_unimplemented
;
5518 trace_mips_translate_c0("mfhc0", register_name
, reg
, sel
);
5522 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n",
5523 register_name
, reg
, sel
);
5524 tcg_gen_movi_tl(arg
, 0);
5527 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5529 const char *register_name
= "invalid";
5530 uint64_t mask
= ctx
->PAMask
>> 36;
5533 case CP0_REGISTER_02
:
5536 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5537 tcg_gen_andi_tl(arg
, arg
, mask
);
5538 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
5539 register_name
= "EntryLo0";
5542 goto cp0_unimplemented
;
5545 case CP0_REGISTER_03
:
5547 case CP0_REG03__ENTRYLO1
:
5548 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
5549 tcg_gen_andi_tl(arg
, arg
, mask
);
5550 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
5551 register_name
= "EntryLo1";
5554 goto cp0_unimplemented
;
5557 case CP0_REGISTER_09
:
5559 case CP0_REG09__SAAR
:
5560 CP0_CHECK(ctx
->saar
);
5561 gen_helper_mthc0_saar(cpu_env
, arg
);
5562 register_name
= "SAAR";
5565 goto cp0_unimplemented
;
5568 case CP0_REGISTER_17
:
5570 case CP0_REG17__LLADDR
:
5572 * LLAddr is read-only (the only exception is bit 0 if LLB is
5573 * supported); the CP0_LLAddr_rw_bitmask does not seem to be
5574 * relevant for modern MIPS cores supporting MTHC0, therefore
5575 * treating MTHC0 to LLAddr as NOP.
5577 register_name
= "LLAddr";
5579 case CP0_REG17__MAAR
:
5580 CP0_CHECK(ctx
->mrp
);
5581 gen_helper_mthc0_maar(cpu_env
, arg
);
5582 register_name
= "MAAR";
5585 goto cp0_unimplemented
;
5588 case CP0_REGISTER_19
:
5590 case CP0_REG19__WATCHHI0
:
5591 case CP0_REG19__WATCHHI1
:
5592 case CP0_REG19__WATCHHI2
:
5593 case CP0_REG19__WATCHHI3
:
5594 case CP0_REG19__WATCHHI4
:
5595 case CP0_REG19__WATCHHI5
:
5596 case CP0_REG19__WATCHHI6
:
5597 case CP0_REG19__WATCHHI7
:
5598 /* upper 32 bits are only available when Config5MI != 0 */
5600 gen_helper_0e1i(mthc0_watchhi
, arg
, sel
);
5601 register_name
= "WatchHi";
5604 goto cp0_unimplemented
;
5607 case CP0_REGISTER_28
:
5613 tcg_gen_andi_tl(arg
, arg
, mask
);
5614 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
5615 register_name
= "TagLo";
5618 goto cp0_unimplemented
;
5622 goto cp0_unimplemented
;
5624 trace_mips_translate_c0("mthc0", register_name
, reg
, sel
);
5628 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n",
5629 register_name
, reg
, sel
);
5632 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5634 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
5635 tcg_gen_movi_tl(arg
, 0);
5637 tcg_gen_movi_tl(arg
, ~0);
5641 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5643 const char *register_name
= "invalid";
5646 check_insn(ctx
, ISA_MIPS_R1
);
5650 case CP0_REGISTER_00
:
5652 case CP0_REG00__INDEX
:
5653 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5654 register_name
= "Index";
5656 case CP0_REG00__MVPCONTROL
:
5657 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5658 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5659 register_name
= "MVPControl";
5661 case CP0_REG00__MVPCONF0
:
5662 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5663 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5664 register_name
= "MVPConf0";
5666 case CP0_REG00__MVPCONF1
:
5667 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5668 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5669 register_name
= "MVPConf1";
5671 case CP0_REG00__VPCONTROL
:
5673 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5674 register_name
= "VPControl";
5677 goto cp0_unimplemented
;
5680 case CP0_REGISTER_01
:
5682 case CP0_REG01__RANDOM
:
5683 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
5684 gen_helper_mfc0_random(arg
, cpu_env
);
5685 register_name
= "Random";
5687 case CP0_REG01__VPECONTROL
:
5688 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5689 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5690 register_name
= "VPEControl";
5692 case CP0_REG01__VPECONF0
:
5693 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5694 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5695 register_name
= "VPEConf0";
5697 case CP0_REG01__VPECONF1
:
5698 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5699 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5700 register_name
= "VPEConf1";
5702 case CP0_REG01__YQMASK
:
5703 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5704 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5705 register_name
= "YQMask";
5707 case CP0_REG01__VPESCHEDULE
:
5708 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5709 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5710 register_name
= "VPESchedule";
5712 case CP0_REG01__VPESCHEFBACK
:
5713 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5714 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5715 register_name
= "VPEScheFBack";
5717 case CP0_REG01__VPEOPT
:
5718 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5719 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5720 register_name
= "VPEOpt";
5723 goto cp0_unimplemented
;
5726 case CP0_REGISTER_02
:
5728 case CP0_REG02__ENTRYLO0
:
5730 TCGv_i64 tmp
= tcg_temp_new_i64();
5731 tcg_gen_ld_i64(tmp
, cpu_env
,
5732 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5733 #if defined(TARGET_MIPS64)
5735 /* Move RI/XI fields to bits 31:30 */
5736 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5737 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5740 gen_move_low32(arg
, tmp
);
5741 tcg_temp_free_i64(tmp
);
5743 register_name
= "EntryLo0";
5745 case CP0_REG02__TCSTATUS
:
5746 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5747 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5748 register_name
= "TCStatus";
5750 case CP0_REG02__TCBIND
:
5751 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5752 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5753 register_name
= "TCBind";
5755 case CP0_REG02__TCRESTART
:
5756 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5757 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5758 register_name
= "TCRestart";
5760 case CP0_REG02__TCHALT
:
5761 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5762 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5763 register_name
= "TCHalt";
5765 case CP0_REG02__TCCONTEXT
:
5766 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5767 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5768 register_name
= "TCContext";
5770 case CP0_REG02__TCSCHEDULE
:
5771 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5772 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5773 register_name
= "TCSchedule";
5775 case CP0_REG02__TCSCHEFBACK
:
5776 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5777 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5778 register_name
= "TCScheFBack";
5781 goto cp0_unimplemented
;
5784 case CP0_REGISTER_03
:
5786 case CP0_REG03__ENTRYLO1
:
5788 TCGv_i64 tmp
= tcg_temp_new_i64();
5789 tcg_gen_ld_i64(tmp
, cpu_env
,
5790 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5791 #if defined(TARGET_MIPS64)
5793 /* Move RI/XI fields to bits 31:30 */
5794 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5795 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5798 gen_move_low32(arg
, tmp
);
5799 tcg_temp_free_i64(tmp
);
5801 register_name
= "EntryLo1";
5803 case CP0_REG03__GLOBALNUM
:
5805 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5806 register_name
= "GlobalNumber";
5809 goto cp0_unimplemented
;
5812 case CP0_REGISTER_04
:
5814 case CP0_REG04__CONTEXT
:
5815 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5816 tcg_gen_ext32s_tl(arg
, arg
);
5817 register_name
= "Context";
5819 case CP0_REG04__CONTEXTCONFIG
:
5821 /* gen_helper_mfc0_contextconfig(arg); */
5822 register_name
= "ContextConfig";
5823 goto cp0_unimplemented
;
5824 case CP0_REG04__USERLOCAL
:
5825 CP0_CHECK(ctx
->ulri
);
5826 tcg_gen_ld_tl(arg
, cpu_env
,
5827 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5828 tcg_gen_ext32s_tl(arg
, arg
);
5829 register_name
= "UserLocal";
5831 case CP0_REG04__MMID
:
5833 gen_helper_mtc0_memorymapid(cpu_env
, arg
);
5834 register_name
= "MMID";
5837 goto cp0_unimplemented
;
5840 case CP0_REGISTER_05
:
5842 case CP0_REG05__PAGEMASK
:
5843 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5844 register_name
= "PageMask";
5846 case CP0_REG05__PAGEGRAIN
:
5847 check_insn(ctx
, ISA_MIPS_R2
);
5848 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5849 register_name
= "PageGrain";
5851 case CP0_REG05__SEGCTL0
:
5853 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5854 tcg_gen_ext32s_tl(arg
, arg
);
5855 register_name
= "SegCtl0";
5857 case CP0_REG05__SEGCTL1
:
5859 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5860 tcg_gen_ext32s_tl(arg
, arg
);
5861 register_name
= "SegCtl1";
5863 case CP0_REG05__SEGCTL2
:
5865 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5866 tcg_gen_ext32s_tl(arg
, arg
);
5867 register_name
= "SegCtl2";
5869 case CP0_REG05__PWBASE
:
5871 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
5872 register_name
= "PWBase";
5874 case CP0_REG05__PWFIELD
:
5876 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
5877 register_name
= "PWField";
5879 case CP0_REG05__PWSIZE
:
5881 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
5882 register_name
= "PWSize";
5885 goto cp0_unimplemented
;
5888 case CP0_REGISTER_06
:
5890 case CP0_REG06__WIRED
:
5891 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5892 register_name
= "Wired";
5894 case CP0_REG06__SRSCONF0
:
5895 check_insn(ctx
, ISA_MIPS_R2
);
5896 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5897 register_name
= "SRSConf0";
5899 case CP0_REG06__SRSCONF1
:
5900 check_insn(ctx
, ISA_MIPS_R2
);
5901 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5902 register_name
= "SRSConf1";
5904 case CP0_REG06__SRSCONF2
:
5905 check_insn(ctx
, ISA_MIPS_R2
);
5906 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5907 register_name
= "SRSConf2";
5909 case CP0_REG06__SRSCONF3
:
5910 check_insn(ctx
, ISA_MIPS_R2
);
5911 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5912 register_name
= "SRSConf3";
5914 case CP0_REG06__SRSCONF4
:
5915 check_insn(ctx
, ISA_MIPS_R2
);
5916 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5917 register_name
= "SRSConf4";
5919 case CP0_REG06__PWCTL
:
5921 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
5922 register_name
= "PWCtl";
5925 goto cp0_unimplemented
;
5928 case CP0_REGISTER_07
:
5930 case CP0_REG07__HWRENA
:
5931 check_insn(ctx
, ISA_MIPS_R2
);
5932 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5933 register_name
= "HWREna";
5936 goto cp0_unimplemented
;
5939 case CP0_REGISTER_08
:
5941 case CP0_REG08__BADVADDR
:
5942 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5943 tcg_gen_ext32s_tl(arg
, arg
);
5944 register_name
= "BadVAddr";
5946 case CP0_REG08__BADINSTR
:
5948 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5949 register_name
= "BadInstr";
5951 case CP0_REG08__BADINSTRP
:
5953 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5954 register_name
= "BadInstrP";
5956 case CP0_REG08__BADINSTRX
:
5958 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
5959 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
5960 register_name
= "BadInstrX";
5963 goto cp0_unimplemented
;
5966 case CP0_REGISTER_09
:
5968 case CP0_REG09__COUNT
:
5969 /* Mark as an IO operation because we read the time. */
5970 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5973 gen_helper_mfc0_count(arg
, cpu_env
);
5975 * Break the TB to be able to take timer interrupts immediately
5976 * after reading count. DISAS_STOP isn't sufficient, we need to
5977 * ensure we break completely out of translated code.
5979 gen_save_pc(ctx
->base
.pc_next
+ 4);
5980 ctx
->base
.is_jmp
= DISAS_EXIT
;
5981 register_name
= "Count";
5983 case CP0_REG09__SAARI
:
5984 CP0_CHECK(ctx
->saar
);
5985 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
5986 register_name
= "SAARI";
5988 case CP0_REG09__SAAR
:
5989 CP0_CHECK(ctx
->saar
);
5990 gen_helper_mfc0_saar(arg
, cpu_env
);
5991 register_name
= "SAAR";
5994 goto cp0_unimplemented
;
5997 case CP0_REGISTER_10
:
5999 case CP0_REG10__ENTRYHI
:
6000 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
6001 tcg_gen_ext32s_tl(arg
, arg
);
6002 register_name
= "EntryHi";
6005 goto cp0_unimplemented
;
6008 case CP0_REGISTER_11
:
6010 case CP0_REG11__COMPARE
:
6011 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6012 register_name
= "Compare";
6014 /* 6,7 are implementation dependent */
6016 goto cp0_unimplemented
;
6019 case CP0_REGISTER_12
:
6021 case CP0_REG12__STATUS
:
6022 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6023 register_name
= "Status";
6025 case CP0_REG12__INTCTL
:
6026 check_insn(ctx
, ISA_MIPS_R2
);
6027 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6028 register_name
= "IntCtl";
6030 case CP0_REG12__SRSCTL
:
6031 check_insn(ctx
, ISA_MIPS_R2
);
6032 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6033 register_name
= "SRSCtl";
6035 case CP0_REG12__SRSMAP
:
6036 check_insn(ctx
, ISA_MIPS_R2
);
6037 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6038 register_name
= "SRSMap";
6041 goto cp0_unimplemented
;
6044 case CP0_REGISTER_13
:
6046 case CP0_REG13__CAUSE
:
6047 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6048 register_name
= "Cause";
6051 goto cp0_unimplemented
;
6054 case CP0_REGISTER_14
:
6056 case CP0_REG14__EPC
:
6057 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6058 tcg_gen_ext32s_tl(arg
, arg
);
6059 register_name
= "EPC";
6062 goto cp0_unimplemented
;
6065 case CP0_REGISTER_15
:
6067 case CP0_REG15__PRID
:
6068 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6069 register_name
= "PRid";
6071 case CP0_REG15__EBASE
:
6072 check_insn(ctx
, ISA_MIPS_R2
);
6073 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6074 tcg_gen_ext32s_tl(arg
, arg
);
6075 register_name
= "EBase";
6077 case CP0_REG15__CMGCRBASE
:
6078 check_insn(ctx
, ISA_MIPS_R2
);
6079 CP0_CHECK(ctx
->cmgcr
);
6080 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6081 tcg_gen_ext32s_tl(arg
, arg
);
6082 register_name
= "CMGCRBase";
6085 goto cp0_unimplemented
;
6088 case CP0_REGISTER_16
:
6090 case CP0_REG16__CONFIG
:
6091 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6092 register_name
= "Config";
6094 case CP0_REG16__CONFIG1
:
6095 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6096 register_name
= "Config1";
6098 case CP0_REG16__CONFIG2
:
6099 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6100 register_name
= "Config2";
6102 case CP0_REG16__CONFIG3
:
6103 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6104 register_name
= "Config3";
6106 case CP0_REG16__CONFIG4
:
6107 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6108 register_name
= "Config4";
6110 case CP0_REG16__CONFIG5
:
6111 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6112 register_name
= "Config5";
6114 /* 6,7 are implementation dependent */
6115 case CP0_REG16__CONFIG6
:
6116 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6117 register_name
= "Config6";
6119 case CP0_REG16__CONFIG7
:
6120 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6121 register_name
= "Config7";
6124 goto cp0_unimplemented
;
6127 case CP0_REGISTER_17
:
6129 case CP0_REG17__LLADDR
:
6130 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6131 register_name
= "LLAddr";
6133 case CP0_REG17__MAAR
:
6134 CP0_CHECK(ctx
->mrp
);
6135 gen_helper_mfc0_maar(arg
, cpu_env
);
6136 register_name
= "MAAR";
6138 case CP0_REG17__MAARI
:
6139 CP0_CHECK(ctx
->mrp
);
6140 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6141 register_name
= "MAARI";
6144 goto cp0_unimplemented
;
6147 case CP0_REGISTER_18
:
6149 case CP0_REG18__WATCHLO0
:
6150 case CP0_REG18__WATCHLO1
:
6151 case CP0_REG18__WATCHLO2
:
6152 case CP0_REG18__WATCHLO3
:
6153 case CP0_REG18__WATCHLO4
:
6154 case CP0_REG18__WATCHLO5
:
6155 case CP0_REG18__WATCHLO6
:
6156 case CP0_REG18__WATCHLO7
:
6157 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6158 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6159 register_name
= "WatchLo";
6162 goto cp0_unimplemented
;
6165 case CP0_REGISTER_19
:
6167 case CP0_REG19__WATCHHI0
:
6168 case CP0_REG19__WATCHHI1
:
6169 case CP0_REG19__WATCHHI2
:
6170 case CP0_REG19__WATCHHI3
:
6171 case CP0_REG19__WATCHHI4
:
6172 case CP0_REG19__WATCHHI5
:
6173 case CP0_REG19__WATCHHI6
:
6174 case CP0_REG19__WATCHHI7
:
6175 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6176 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6177 register_name
= "WatchHi";
6180 goto cp0_unimplemented
;
6183 case CP0_REGISTER_20
:
6185 case CP0_REG20__XCONTEXT
:
6186 #if defined(TARGET_MIPS64)
6187 check_insn(ctx
, ISA_MIPS3
);
6188 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6189 tcg_gen_ext32s_tl(arg
, arg
);
6190 register_name
= "XContext";
6194 goto cp0_unimplemented
;
6197 case CP0_REGISTER_21
:
6198 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6199 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
6202 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6203 register_name
= "Framemask";
6206 goto cp0_unimplemented
;
6209 case CP0_REGISTER_22
:
6210 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6211 register_name
= "'Diagnostic"; /* implementation dependent */
6213 case CP0_REGISTER_23
:
6215 case CP0_REG23__DEBUG
:
6216 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6217 register_name
= "Debug";
6219 case CP0_REG23__TRACECONTROL
:
6220 /* PDtrace support */
6221 /* gen_helper_mfc0_tracecontrol(arg); */
6222 register_name
= "TraceControl";
6223 goto cp0_unimplemented
;
6224 case CP0_REG23__TRACECONTROL2
:
6225 /* PDtrace support */
6226 /* gen_helper_mfc0_tracecontrol2(arg); */
6227 register_name
= "TraceControl2";
6228 goto cp0_unimplemented
;
6229 case CP0_REG23__USERTRACEDATA1
:
6230 /* PDtrace support */
6231 /* gen_helper_mfc0_usertracedata1(arg);*/
6232 register_name
= "UserTraceData1";
6233 goto cp0_unimplemented
;
6234 case CP0_REG23__TRACEIBPC
:
6235 /* PDtrace support */
6236 /* gen_helper_mfc0_traceibpc(arg); */
6237 register_name
= "TraceIBPC";
6238 goto cp0_unimplemented
;
6239 case CP0_REG23__TRACEDBPC
:
6240 /* PDtrace support */
6241 /* gen_helper_mfc0_tracedbpc(arg); */
6242 register_name
= "TraceDBPC";
6243 goto cp0_unimplemented
;
6245 goto cp0_unimplemented
;
6248 case CP0_REGISTER_24
:
6250 case CP0_REG24__DEPC
:
6252 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6253 tcg_gen_ext32s_tl(arg
, arg
);
6254 register_name
= "DEPC";
6257 goto cp0_unimplemented
;
6260 case CP0_REGISTER_25
:
6262 case CP0_REG25__PERFCTL0
:
6263 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6264 register_name
= "Performance0";
6266 case CP0_REG25__PERFCNT0
:
6267 /* gen_helper_mfc0_performance1(arg); */
6268 register_name
= "Performance1";
6269 goto cp0_unimplemented
;
6270 case CP0_REG25__PERFCTL1
:
6271 /* gen_helper_mfc0_performance2(arg); */
6272 register_name
= "Performance2";
6273 goto cp0_unimplemented
;
6274 case CP0_REG25__PERFCNT1
:
6275 /* gen_helper_mfc0_performance3(arg); */
6276 register_name
= "Performance3";
6277 goto cp0_unimplemented
;
6278 case CP0_REG25__PERFCTL2
:
6279 /* gen_helper_mfc0_performance4(arg); */
6280 register_name
= "Performance4";
6281 goto cp0_unimplemented
;
6282 case CP0_REG25__PERFCNT2
:
6283 /* gen_helper_mfc0_performance5(arg); */
6284 register_name
= "Performance5";
6285 goto cp0_unimplemented
;
6286 case CP0_REG25__PERFCTL3
:
6287 /* gen_helper_mfc0_performance6(arg); */
6288 register_name
= "Performance6";
6289 goto cp0_unimplemented
;
6290 case CP0_REG25__PERFCNT3
:
6291 /* gen_helper_mfc0_performance7(arg); */
6292 register_name
= "Performance7";
6293 goto cp0_unimplemented
;
6295 goto cp0_unimplemented
;
6298 case CP0_REGISTER_26
:
6300 case CP0_REG26__ERRCTL
:
6301 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6302 register_name
= "ErrCtl";
6305 goto cp0_unimplemented
;
6308 case CP0_REGISTER_27
:
6310 case CP0_REG27__CACHERR
:
6311 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6312 register_name
= "CacheErr";
6315 goto cp0_unimplemented
;
6318 case CP0_REGISTER_28
:
6320 case CP0_REG28__TAGLO
:
6321 case CP0_REG28__TAGLO1
:
6322 case CP0_REG28__TAGLO2
:
6323 case CP0_REG28__TAGLO3
:
6325 TCGv_i64 tmp
= tcg_temp_new_i64();
6326 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
6327 gen_move_low32(arg
, tmp
);
6328 tcg_temp_free_i64(tmp
);
6330 register_name
= "TagLo";
6332 case CP0_REG28__DATALO
:
6333 case CP0_REG28__DATALO1
:
6334 case CP0_REG28__DATALO2
:
6335 case CP0_REG28__DATALO3
:
6336 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6337 register_name
= "DataLo";
6340 goto cp0_unimplemented
;
6343 case CP0_REGISTER_29
:
6345 case CP0_REG29__TAGHI
:
6346 case CP0_REG29__TAGHI1
:
6347 case CP0_REG29__TAGHI2
:
6348 case CP0_REG29__TAGHI3
:
6349 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6350 register_name
= "TagHi";
6352 case CP0_REG29__DATAHI
:
6353 case CP0_REG29__DATAHI1
:
6354 case CP0_REG29__DATAHI2
:
6355 case CP0_REG29__DATAHI3
:
6356 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6357 register_name
= "DataHi";
6360 goto cp0_unimplemented
;
6363 case CP0_REGISTER_30
:
6365 case CP0_REG30__ERROREPC
:
6366 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6367 tcg_gen_ext32s_tl(arg
, arg
);
6368 register_name
= "ErrorEPC";
6371 goto cp0_unimplemented
;
6374 case CP0_REGISTER_31
:
6376 case CP0_REG31__DESAVE
:
6378 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6379 register_name
= "DESAVE";
6381 case CP0_REG31__KSCRATCH1
:
6382 case CP0_REG31__KSCRATCH2
:
6383 case CP0_REG31__KSCRATCH3
:
6384 case CP0_REG31__KSCRATCH4
:
6385 case CP0_REG31__KSCRATCH5
:
6386 case CP0_REG31__KSCRATCH6
:
6387 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6388 tcg_gen_ld_tl(arg
, cpu_env
,
6389 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
6390 tcg_gen_ext32s_tl(arg
, arg
);
6391 register_name
= "KScratch";
6394 goto cp0_unimplemented
;
6398 goto cp0_unimplemented
;
6400 trace_mips_translate_c0("mfc0", register_name
, reg
, sel
);
6404 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n",
6405 register_name
, reg
, sel
);
6406 gen_mfc0_unimplemented(ctx
, arg
);
6409 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6411 const char *register_name
= "invalid";
6414 check_insn(ctx
, ISA_MIPS_R1
);
6417 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6422 case CP0_REGISTER_00
:
6424 case CP0_REG00__INDEX
:
6425 gen_helper_mtc0_index(cpu_env
, arg
);
6426 register_name
= "Index";
6428 case CP0_REG00__MVPCONTROL
:
6429 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6430 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6431 register_name
= "MVPControl";
6433 case CP0_REG00__MVPCONF0
:
6434 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6436 register_name
= "MVPConf0";
6438 case CP0_REG00__MVPCONF1
:
6439 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6441 register_name
= "MVPConf1";
6443 case CP0_REG00__VPCONTROL
:
6446 register_name
= "VPControl";
6449 goto cp0_unimplemented
;
6452 case CP0_REGISTER_01
:
6454 case CP0_REG01__RANDOM
:
6456 register_name
= "Random";
6458 case CP0_REG01__VPECONTROL
:
6459 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6460 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6461 register_name
= "VPEControl";
6463 case CP0_REG01__VPECONF0
:
6464 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6465 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6466 register_name
= "VPEConf0";
6468 case CP0_REG01__VPECONF1
:
6469 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6470 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6471 register_name
= "VPEConf1";
6473 case CP0_REG01__YQMASK
:
6474 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6475 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6476 register_name
= "YQMask";
6478 case CP0_REG01__VPESCHEDULE
:
6479 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6480 tcg_gen_st_tl(arg
, cpu_env
,
6481 offsetof(CPUMIPSState
, CP0_VPESchedule
));
6482 register_name
= "VPESchedule";
6484 case CP0_REG01__VPESCHEFBACK
:
6485 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6486 tcg_gen_st_tl(arg
, cpu_env
,
6487 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6488 register_name
= "VPEScheFBack";
6490 case CP0_REG01__VPEOPT
:
6491 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6492 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6493 register_name
= "VPEOpt";
6496 goto cp0_unimplemented
;
6499 case CP0_REGISTER_02
:
6501 case CP0_REG02__ENTRYLO0
:
6502 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
6503 register_name
= "EntryLo0";
6505 case CP0_REG02__TCSTATUS
:
6506 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6507 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6508 register_name
= "TCStatus";
6510 case CP0_REG02__TCBIND
:
6511 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6512 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6513 register_name
= "TCBind";
6515 case CP0_REG02__TCRESTART
:
6516 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6517 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6518 register_name
= "TCRestart";
6520 case CP0_REG02__TCHALT
:
6521 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6522 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6523 register_name
= "TCHalt";
6525 case CP0_REG02__TCCONTEXT
:
6526 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6527 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6528 register_name
= "TCContext";
6530 case CP0_REG02__TCSCHEDULE
:
6531 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6532 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6533 register_name
= "TCSchedule";
6535 case CP0_REG02__TCSCHEFBACK
:
6536 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6537 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6538 register_name
= "TCScheFBack";
6541 goto cp0_unimplemented
;
6544 case CP0_REGISTER_03
:
6546 case CP0_REG03__ENTRYLO1
:
6547 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
6548 register_name
= "EntryLo1";
6550 case CP0_REG03__GLOBALNUM
:
6553 register_name
= "GlobalNumber";
6556 goto cp0_unimplemented
;
6559 case CP0_REGISTER_04
:
6561 case CP0_REG04__CONTEXT
:
6562 gen_helper_mtc0_context(cpu_env
, arg
);
6563 register_name
= "Context";
6565 case CP0_REG04__CONTEXTCONFIG
:
6567 /* gen_helper_mtc0_contextconfig(arg); */
6568 register_name
= "ContextConfig";
6569 goto cp0_unimplemented
;
6570 case CP0_REG04__USERLOCAL
:
6571 CP0_CHECK(ctx
->ulri
);
6572 tcg_gen_st_tl(arg
, cpu_env
,
6573 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6574 register_name
= "UserLocal";
6576 case CP0_REG04__MMID
:
6578 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MemoryMapID
));
6579 register_name
= "MMID";
6582 goto cp0_unimplemented
;
6585 case CP0_REGISTER_05
:
6587 case CP0_REG05__PAGEMASK
:
6588 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6589 register_name
= "PageMask";
6591 case CP0_REG05__PAGEGRAIN
:
6592 check_insn(ctx
, ISA_MIPS_R2
);
6593 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6594 register_name
= "PageGrain";
6595 ctx
->base
.is_jmp
= DISAS_STOP
;
6597 case CP0_REG05__SEGCTL0
:
6599 gen_helper_mtc0_segctl0(cpu_env
, arg
);
6600 register_name
= "SegCtl0";
6602 case CP0_REG05__SEGCTL1
:
6604 gen_helper_mtc0_segctl1(cpu_env
, arg
);
6605 register_name
= "SegCtl1";
6607 case CP0_REG05__SEGCTL2
:
6609 gen_helper_mtc0_segctl2(cpu_env
, arg
);
6610 register_name
= "SegCtl2";
6612 case CP0_REG05__PWBASE
:
6614 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6615 register_name
= "PWBase";
6617 case CP0_REG05__PWFIELD
:
6619 gen_helper_mtc0_pwfield(cpu_env
, arg
);
6620 register_name
= "PWField";
6622 case CP0_REG05__PWSIZE
:
6624 gen_helper_mtc0_pwsize(cpu_env
, arg
);
6625 register_name
= "PWSize";
6628 goto cp0_unimplemented
;
6631 case CP0_REGISTER_06
:
6633 case CP0_REG06__WIRED
:
6634 gen_helper_mtc0_wired(cpu_env
, arg
);
6635 register_name
= "Wired";
6637 case CP0_REG06__SRSCONF0
:
6638 check_insn(ctx
, ISA_MIPS_R2
);
6639 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6640 register_name
= "SRSConf0";
6642 case CP0_REG06__SRSCONF1
:
6643 check_insn(ctx
, ISA_MIPS_R2
);
6644 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6645 register_name
= "SRSConf1";
6647 case CP0_REG06__SRSCONF2
:
6648 check_insn(ctx
, ISA_MIPS_R2
);
6649 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
6650 register_name
= "SRSConf2";
6652 case CP0_REG06__SRSCONF3
:
6653 check_insn(ctx
, ISA_MIPS_R2
);
6654 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
6655 register_name
= "SRSConf3";
6657 case CP0_REG06__SRSCONF4
:
6658 check_insn(ctx
, ISA_MIPS_R2
);
6659 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
6660 register_name
= "SRSConf4";
6662 case CP0_REG06__PWCTL
:
6664 gen_helper_mtc0_pwctl(cpu_env
, arg
);
6665 register_name
= "PWCtl";
6668 goto cp0_unimplemented
;
6671 case CP0_REGISTER_07
:
6673 case CP0_REG07__HWRENA
:
6674 check_insn(ctx
, ISA_MIPS_R2
);
6675 gen_helper_mtc0_hwrena(cpu_env
, arg
);
6676 ctx
->base
.is_jmp
= DISAS_STOP
;
6677 register_name
= "HWREna";
6680 goto cp0_unimplemented
;
6683 case CP0_REGISTER_08
:
6685 case CP0_REG08__BADVADDR
:
6687 register_name
= "BadVAddr";
6689 case CP0_REG08__BADINSTR
:
6691 register_name
= "BadInstr";
6693 case CP0_REG08__BADINSTRP
:
6695 register_name
= "BadInstrP";
6697 case CP0_REG08__BADINSTRX
:
6699 register_name
= "BadInstrX";
6702 goto cp0_unimplemented
;
6705 case CP0_REGISTER_09
:
6707 case CP0_REG09__COUNT
:
6708 gen_helper_mtc0_count(cpu_env
, arg
);
6709 register_name
= "Count";
6711 case CP0_REG09__SAARI
:
6712 CP0_CHECK(ctx
->saar
);
6713 gen_helper_mtc0_saari(cpu_env
, arg
);
6714 register_name
= "SAARI";
6716 case CP0_REG09__SAAR
:
6717 CP0_CHECK(ctx
->saar
);
6718 gen_helper_mtc0_saar(cpu_env
, arg
);
6719 register_name
= "SAAR";
6722 goto cp0_unimplemented
;
6725 case CP0_REGISTER_10
:
6727 case CP0_REG10__ENTRYHI
:
6728 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6729 register_name
= "EntryHi";
6732 goto cp0_unimplemented
;
6735 case CP0_REGISTER_11
:
6737 case CP0_REG11__COMPARE
:
6738 gen_helper_mtc0_compare(cpu_env
, arg
);
6739 register_name
= "Compare";
6741 /* 6,7 are implementation dependent */
6743 goto cp0_unimplemented
;
6746 case CP0_REGISTER_12
:
6748 case CP0_REG12__STATUS
:
6749 save_cpu_state(ctx
, 1);
6750 gen_helper_mtc0_status(cpu_env
, arg
);
6751 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6752 gen_save_pc(ctx
->base
.pc_next
+ 4);
6753 ctx
->base
.is_jmp
= DISAS_EXIT
;
6754 register_name
= "Status";
6756 case CP0_REG12__INTCTL
:
6757 check_insn(ctx
, ISA_MIPS_R2
);
6758 gen_helper_mtc0_intctl(cpu_env
, arg
);
6759 /* Stop translation as we may have switched the execution mode */
6760 ctx
->base
.is_jmp
= DISAS_STOP
;
6761 register_name
= "IntCtl";
6763 case CP0_REG12__SRSCTL
:
6764 check_insn(ctx
, ISA_MIPS_R2
);
6765 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6766 /* Stop translation as we may have switched the execution mode */
6767 ctx
->base
.is_jmp
= DISAS_STOP
;
6768 register_name
= "SRSCtl";
6770 case CP0_REG12__SRSMAP
:
6771 check_insn(ctx
, ISA_MIPS_R2
);
6772 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6773 /* Stop translation as we may have switched the execution mode */
6774 ctx
->base
.is_jmp
= DISAS_STOP
;
6775 register_name
= "SRSMap";
6778 goto cp0_unimplemented
;
6781 case CP0_REGISTER_13
:
6783 case CP0_REG13__CAUSE
:
6784 save_cpu_state(ctx
, 1);
6785 gen_helper_mtc0_cause(cpu_env
, arg
);
6787 * Stop translation as we may have triggered an interrupt.
6788 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6789 * translated code to check for pending interrupts.
6791 gen_save_pc(ctx
->base
.pc_next
+ 4);
6792 ctx
->base
.is_jmp
= DISAS_EXIT
;
6793 register_name
= "Cause";
6796 goto cp0_unimplemented
;
6799 case CP0_REGISTER_14
:
6801 case CP0_REG14__EPC
:
6802 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6803 register_name
= "EPC";
6806 goto cp0_unimplemented
;
6809 case CP0_REGISTER_15
:
6811 case CP0_REG15__PRID
:
6813 register_name
= "PRid";
6815 case CP0_REG15__EBASE
:
6816 check_insn(ctx
, ISA_MIPS_R2
);
6817 gen_helper_mtc0_ebase(cpu_env
, arg
);
6818 register_name
= "EBase";
6821 goto cp0_unimplemented
;
6824 case CP0_REGISTER_16
:
6826 case CP0_REG16__CONFIG
:
6827 gen_helper_mtc0_config0(cpu_env
, arg
);
6828 register_name
= "Config";
6829 /* Stop translation as we may have switched the execution mode */
6830 ctx
->base
.is_jmp
= DISAS_STOP
;
6832 case CP0_REG16__CONFIG1
:
6833 /* ignored, read only */
6834 register_name
= "Config1";
6836 case CP0_REG16__CONFIG2
:
6837 gen_helper_mtc0_config2(cpu_env
, arg
);
6838 register_name
= "Config2";
6839 /* Stop translation as we may have switched the execution mode */
6840 ctx
->base
.is_jmp
= DISAS_STOP
;
6842 case CP0_REG16__CONFIG3
:
6843 gen_helper_mtc0_config3(cpu_env
, arg
);
6844 register_name
= "Config3";
6845 /* Stop translation as we may have switched the execution mode */
6846 ctx
->base
.is_jmp
= DISAS_STOP
;
6848 case CP0_REG16__CONFIG4
:
6849 gen_helper_mtc0_config4(cpu_env
, arg
);
6850 register_name
= "Config4";
6851 ctx
->base
.is_jmp
= DISAS_STOP
;
6853 case CP0_REG16__CONFIG5
:
6854 gen_helper_mtc0_config5(cpu_env
, arg
);
6855 register_name
= "Config5";
6856 /* Stop translation as we may have switched the execution mode */
6857 ctx
->base
.is_jmp
= DISAS_STOP
;
6859 /* 6,7 are implementation dependent */
6860 case CP0_REG16__CONFIG6
:
6862 register_name
= "Config6";
6864 case CP0_REG16__CONFIG7
:
6866 register_name
= "Config7";
6869 register_name
= "Invalid config selector";
6870 goto cp0_unimplemented
;
6873 case CP0_REGISTER_17
:
6875 case CP0_REG17__LLADDR
:
6876 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6877 register_name
= "LLAddr";
6879 case CP0_REG17__MAAR
:
6880 CP0_CHECK(ctx
->mrp
);
6881 gen_helper_mtc0_maar(cpu_env
, arg
);
6882 register_name
= "MAAR";
6884 case CP0_REG17__MAARI
:
6885 CP0_CHECK(ctx
->mrp
);
6886 gen_helper_mtc0_maari(cpu_env
, arg
);
6887 register_name
= "MAARI";
6890 goto cp0_unimplemented
;
6893 case CP0_REGISTER_18
:
6895 case CP0_REG18__WATCHLO0
:
6896 case CP0_REG18__WATCHLO1
:
6897 case CP0_REG18__WATCHLO2
:
6898 case CP0_REG18__WATCHLO3
:
6899 case CP0_REG18__WATCHLO4
:
6900 case CP0_REG18__WATCHLO5
:
6901 case CP0_REG18__WATCHLO6
:
6902 case CP0_REG18__WATCHLO7
:
6903 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6904 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6905 register_name
= "WatchLo";
6908 goto cp0_unimplemented
;
6911 case CP0_REGISTER_19
:
6913 case CP0_REG19__WATCHHI0
:
6914 case CP0_REG19__WATCHHI1
:
6915 case CP0_REG19__WATCHHI2
:
6916 case CP0_REG19__WATCHHI3
:
6917 case CP0_REG19__WATCHHI4
:
6918 case CP0_REG19__WATCHHI5
:
6919 case CP0_REG19__WATCHHI6
:
6920 case CP0_REG19__WATCHHI7
:
6921 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6922 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6923 register_name
= "WatchHi";
6926 goto cp0_unimplemented
;
6929 case CP0_REGISTER_20
:
6931 case CP0_REG20__XCONTEXT
:
6932 #if defined(TARGET_MIPS64)
6933 check_insn(ctx
, ISA_MIPS3
);
6934 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6935 register_name
= "XContext";
6939 goto cp0_unimplemented
;
6942 case CP0_REGISTER_21
:
6943 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6944 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
6947 gen_helper_mtc0_framemask(cpu_env
, arg
);
6948 register_name
= "Framemask";
6951 goto cp0_unimplemented
;
6954 case CP0_REGISTER_22
:
6956 register_name
= "Diagnostic"; /* implementation dependent */
6958 case CP0_REGISTER_23
:
6960 case CP0_REG23__DEBUG
:
6961 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6962 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6963 gen_save_pc(ctx
->base
.pc_next
+ 4);
6964 ctx
->base
.is_jmp
= DISAS_EXIT
;
6965 register_name
= "Debug";
6967 case CP0_REG23__TRACECONTROL
:
6968 /* PDtrace support */
6969 /* gen_helper_mtc0_tracecontrol(cpu_env, arg); */
6970 register_name
= "TraceControl";
6971 /* Stop translation as we may have switched the execution mode */
6972 ctx
->base
.is_jmp
= DISAS_STOP
;
6973 goto cp0_unimplemented
;
6974 case CP0_REG23__TRACECONTROL2
:
6975 /* PDtrace support */
6976 /* gen_helper_mtc0_tracecontrol2(cpu_env, arg); */
6977 register_name
= "TraceControl2";
6978 /* Stop translation as we may have switched the execution mode */
6979 ctx
->base
.is_jmp
= DISAS_STOP
;
6980 goto cp0_unimplemented
;
6981 case CP0_REG23__USERTRACEDATA1
:
6982 /* Stop translation as we may have switched the execution mode */
6983 ctx
->base
.is_jmp
= DISAS_STOP
;
6984 /* PDtrace support */
6985 /* gen_helper_mtc0_usertracedata1(cpu_env, arg);*/
6986 register_name
= "UserTraceData";
6987 /* Stop translation as we may have switched the execution mode */
6988 ctx
->base
.is_jmp
= DISAS_STOP
;
6989 goto cp0_unimplemented
;
6990 case CP0_REG23__TRACEIBPC
:
6991 /* PDtrace support */
6992 /* gen_helper_mtc0_traceibpc(cpu_env, arg); */
6993 /* Stop translation as we may have switched the execution mode */
6994 ctx
->base
.is_jmp
= DISAS_STOP
;
6995 register_name
= "TraceIBPC";
6996 goto cp0_unimplemented
;
6997 case CP0_REG23__TRACEDBPC
:
6998 /* PDtrace support */
6999 /* gen_helper_mtc0_tracedbpc(cpu_env, arg); */
7000 /* Stop translation as we may have switched the execution mode */
7001 ctx
->base
.is_jmp
= DISAS_STOP
;
7002 register_name
= "TraceDBPC";
7003 goto cp0_unimplemented
;
7005 goto cp0_unimplemented
;
7008 case CP0_REGISTER_24
:
7010 case CP0_REG24__DEPC
:
7012 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7013 register_name
= "DEPC";
7016 goto cp0_unimplemented
;
7019 case CP0_REGISTER_25
:
7021 case CP0_REG25__PERFCTL0
:
7022 gen_helper_mtc0_performance0(cpu_env
, arg
);
7023 register_name
= "Performance0";
7025 case CP0_REG25__PERFCNT0
:
7026 /* gen_helper_mtc0_performance1(arg); */
7027 register_name
= "Performance1";
7028 goto cp0_unimplemented
;
7029 case CP0_REG25__PERFCTL1
:
7030 /* gen_helper_mtc0_performance2(arg); */
7031 register_name
= "Performance2";
7032 goto cp0_unimplemented
;
7033 case CP0_REG25__PERFCNT1
:
7034 /* gen_helper_mtc0_performance3(arg); */
7035 register_name
= "Performance3";
7036 goto cp0_unimplemented
;
7037 case CP0_REG25__PERFCTL2
:
7038 /* gen_helper_mtc0_performance4(arg); */
7039 register_name
= "Performance4";
7040 goto cp0_unimplemented
;
7041 case CP0_REG25__PERFCNT2
:
7042 /* gen_helper_mtc0_performance5(arg); */
7043 register_name
= "Performance5";
7044 goto cp0_unimplemented
;
7045 case CP0_REG25__PERFCTL3
:
7046 /* gen_helper_mtc0_performance6(arg); */
7047 register_name
= "Performance6";
7048 goto cp0_unimplemented
;
7049 case CP0_REG25__PERFCNT3
:
7050 /* gen_helper_mtc0_performance7(arg); */
7051 register_name
= "Performance7";
7052 goto cp0_unimplemented
;
7054 goto cp0_unimplemented
;
7057 case CP0_REGISTER_26
:
7059 case CP0_REG26__ERRCTL
:
7060 gen_helper_mtc0_errctl(cpu_env
, arg
);
7061 ctx
->base
.is_jmp
= DISAS_STOP
;
7062 register_name
= "ErrCtl";
7065 goto cp0_unimplemented
;
7068 case CP0_REGISTER_27
:
7070 case CP0_REG27__CACHERR
:
7072 register_name
= "CacheErr";
7075 goto cp0_unimplemented
;
7078 case CP0_REGISTER_28
:
7080 case CP0_REG28__TAGLO
:
7081 case CP0_REG28__TAGLO1
:
7082 case CP0_REG28__TAGLO2
:
7083 case CP0_REG28__TAGLO3
:
7084 gen_helper_mtc0_taglo(cpu_env
, arg
);
7085 register_name
= "TagLo";
7087 case CP0_REG28__DATALO
:
7088 case CP0_REG28__DATALO1
:
7089 case CP0_REG28__DATALO2
:
7090 case CP0_REG28__DATALO3
:
7091 gen_helper_mtc0_datalo(cpu_env
, arg
);
7092 register_name
= "DataLo";
7095 goto cp0_unimplemented
;
7098 case CP0_REGISTER_29
:
7100 case CP0_REG29__TAGHI
:
7101 case CP0_REG29__TAGHI1
:
7102 case CP0_REG29__TAGHI2
:
7103 case CP0_REG29__TAGHI3
:
7104 gen_helper_mtc0_taghi(cpu_env
, arg
);
7105 register_name
= "TagHi";
7107 case CP0_REG29__DATAHI
:
7108 case CP0_REG29__DATAHI1
:
7109 case CP0_REG29__DATAHI2
:
7110 case CP0_REG29__DATAHI3
:
7111 gen_helper_mtc0_datahi(cpu_env
, arg
);
7112 register_name
= "DataHi";
7115 register_name
= "invalid sel";
7116 goto cp0_unimplemented
;
7119 case CP0_REGISTER_30
:
7121 case CP0_REG30__ERROREPC
:
7122 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7123 register_name
= "ErrorEPC";
7126 goto cp0_unimplemented
;
7129 case CP0_REGISTER_31
:
7131 case CP0_REG31__DESAVE
:
7133 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7134 register_name
= "DESAVE";
7136 case CP0_REG31__KSCRATCH1
:
7137 case CP0_REG31__KSCRATCH2
:
7138 case CP0_REG31__KSCRATCH3
:
7139 case CP0_REG31__KSCRATCH4
:
7140 case CP0_REG31__KSCRATCH5
:
7141 case CP0_REG31__KSCRATCH6
:
7142 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7143 tcg_gen_st_tl(arg
, cpu_env
,
7144 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
7145 register_name
= "KScratch";
7148 goto cp0_unimplemented
;
7152 goto cp0_unimplemented
;
7154 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
7156 /* For simplicity assume that all writes can cause interrupts. */
7157 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7159 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7160 * translated code to check for pending interrupts.
7162 gen_save_pc(ctx
->base
.pc_next
+ 4);
7163 ctx
->base
.is_jmp
= DISAS_EXIT
;
7168 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
7169 register_name
, reg
, sel
);
7172 #if defined(TARGET_MIPS64)
7173 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7175 const char *register_name
= "invalid";
7178 check_insn(ctx
, ISA_MIPS_R1
);
7182 case CP0_REGISTER_00
:
7184 case CP0_REG00__INDEX
:
7185 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7186 register_name
= "Index";
7188 case CP0_REG00__MVPCONTROL
:
7189 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7190 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7191 register_name
= "MVPControl";
7193 case CP0_REG00__MVPCONF0
:
7194 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7195 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7196 register_name
= "MVPConf0";
7198 case CP0_REG00__MVPCONF1
:
7199 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7200 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7201 register_name
= "MVPConf1";
7203 case CP0_REG00__VPCONTROL
:
7205 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7206 register_name
= "VPControl";
7209 goto cp0_unimplemented
;
7212 case CP0_REGISTER_01
:
7214 case CP0_REG01__RANDOM
:
7215 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
7216 gen_helper_mfc0_random(arg
, cpu_env
);
7217 register_name
= "Random";
7219 case CP0_REG01__VPECONTROL
:
7220 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7221 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
7222 register_name
= "VPEControl";
7224 case CP0_REG01__VPECONF0
:
7225 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7226 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
7227 register_name
= "VPEConf0";
7229 case CP0_REG01__VPECONF1
:
7230 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7231 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
7232 register_name
= "VPEConf1";
7234 case CP0_REG01__YQMASK
:
7235 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7236 tcg_gen_ld_tl(arg
, cpu_env
,
7237 offsetof(CPUMIPSState
, CP0_YQMask
));
7238 register_name
= "YQMask";
7240 case CP0_REG01__VPESCHEDULE
:
7241 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7242 tcg_gen_ld_tl(arg
, cpu_env
,
7243 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7244 register_name
= "VPESchedule";
7246 case CP0_REG01__VPESCHEFBACK
:
7247 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7248 tcg_gen_ld_tl(arg
, cpu_env
,
7249 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7250 register_name
= "VPEScheFBack";
7252 case CP0_REG01__VPEOPT
:
7253 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7254 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
7255 register_name
= "VPEOpt";
7258 goto cp0_unimplemented
;
7261 case CP0_REGISTER_02
:
7263 case CP0_REG02__ENTRYLO0
:
7264 tcg_gen_ld_tl(arg
, cpu_env
,
7265 offsetof(CPUMIPSState
, CP0_EntryLo0
));
7266 register_name
= "EntryLo0";
7268 case CP0_REG02__TCSTATUS
:
7269 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7270 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
7271 register_name
= "TCStatus";
7273 case CP0_REG02__TCBIND
:
7274 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7275 gen_helper_mfc0_tcbind(arg
, cpu_env
);
7276 register_name
= "TCBind";
7278 case CP0_REG02__TCRESTART
:
7279 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7280 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
7281 register_name
= "TCRestart";
7283 case CP0_REG02__TCHALT
:
7284 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7285 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
7286 register_name
= "TCHalt";
7288 case CP0_REG02__TCCONTEXT
:
7289 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7290 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
7291 register_name
= "TCContext";
7293 case CP0_REG02__TCSCHEDULE
:
7294 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7295 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
7296 register_name
= "TCSchedule";
7298 case CP0_REG02__TCSCHEFBACK
:
7299 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7300 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
7301 register_name
= "TCScheFBack";
7304 goto cp0_unimplemented
;
7307 case CP0_REGISTER_03
:
7309 case CP0_REG03__ENTRYLO1
:
7310 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
7311 register_name
= "EntryLo1";
7313 case CP0_REG03__GLOBALNUM
:
7315 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
7316 register_name
= "GlobalNumber";
7319 goto cp0_unimplemented
;
7322 case CP0_REGISTER_04
:
7324 case CP0_REG04__CONTEXT
:
7325 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
7326 register_name
= "Context";
7328 case CP0_REG04__CONTEXTCONFIG
:
7330 /* gen_helper_dmfc0_contextconfig(arg); */
7331 register_name
= "ContextConfig";
7332 goto cp0_unimplemented
;
7333 case CP0_REG04__USERLOCAL
:
7334 CP0_CHECK(ctx
->ulri
);
7335 tcg_gen_ld_tl(arg
, cpu_env
,
7336 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7337 register_name
= "UserLocal";
7339 case CP0_REG04__MMID
:
7341 gen_helper_mtc0_memorymapid(cpu_env
, arg
);
7342 register_name
= "MMID";
7345 goto cp0_unimplemented
;
7348 case CP0_REGISTER_05
:
7350 case CP0_REG05__PAGEMASK
:
7351 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
7352 register_name
= "PageMask";
7354 case CP0_REG05__PAGEGRAIN
:
7355 check_insn(ctx
, ISA_MIPS_R2
);
7356 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
7357 register_name
= "PageGrain";
7359 case CP0_REG05__SEGCTL0
:
7361 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
7362 register_name
= "SegCtl0";
7364 case CP0_REG05__SEGCTL1
:
7366 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
7367 register_name
= "SegCtl1";
7369 case CP0_REG05__SEGCTL2
:
7371 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
7372 register_name
= "SegCtl2";
7374 case CP0_REG05__PWBASE
:
7376 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
7377 register_name
= "PWBase";
7379 case CP0_REG05__PWFIELD
:
7381 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
7382 register_name
= "PWField";
7384 case CP0_REG05__PWSIZE
:
7386 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
7387 register_name
= "PWSize";
7390 goto cp0_unimplemented
;
7393 case CP0_REGISTER_06
:
7395 case CP0_REG06__WIRED
:
7396 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
7397 register_name
= "Wired";
7399 case CP0_REG06__SRSCONF0
:
7400 check_insn(ctx
, ISA_MIPS_R2
);
7401 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7402 register_name
= "SRSConf0";
7404 case CP0_REG06__SRSCONF1
:
7405 check_insn(ctx
, ISA_MIPS_R2
);
7406 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7407 register_name
= "SRSConf1";
7409 case CP0_REG06__SRSCONF2
:
7410 check_insn(ctx
, ISA_MIPS_R2
);
7411 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7412 register_name
= "SRSConf2";
7414 case CP0_REG06__SRSCONF3
:
7415 check_insn(ctx
, ISA_MIPS_R2
);
7416 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7417 register_name
= "SRSConf3";
7419 case CP0_REG06__SRSCONF4
:
7420 check_insn(ctx
, ISA_MIPS_R2
);
7421 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7422 register_name
= "SRSConf4";
7424 case CP0_REG06__PWCTL
:
7426 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7427 register_name
= "PWCtl";
7430 goto cp0_unimplemented
;
7433 case CP0_REGISTER_07
:
7435 case CP0_REG07__HWRENA
:
7436 check_insn(ctx
, ISA_MIPS_R2
);
7437 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7438 register_name
= "HWREna";
7441 goto cp0_unimplemented
;
7444 case CP0_REGISTER_08
:
7446 case CP0_REG08__BADVADDR
:
7447 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7448 register_name
= "BadVAddr";
7450 case CP0_REG08__BADINSTR
:
7452 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7453 register_name
= "BadInstr";
7455 case CP0_REG08__BADINSTRP
:
7457 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7458 register_name
= "BadInstrP";
7460 case CP0_REG08__BADINSTRX
:
7462 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7463 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7464 register_name
= "BadInstrX";
7467 goto cp0_unimplemented
;
7470 case CP0_REGISTER_09
:
7472 case CP0_REG09__COUNT
:
7473 /* Mark as an IO operation because we read the time. */
7474 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7477 gen_helper_mfc0_count(arg
, cpu_env
);
7479 * Break the TB to be able to take timer interrupts immediately
7480 * after reading count. DISAS_STOP isn't sufficient, we need to
7481 * ensure we break completely out of translated code.
7483 gen_save_pc(ctx
->base
.pc_next
+ 4);
7484 ctx
->base
.is_jmp
= DISAS_EXIT
;
7485 register_name
= "Count";
7487 case CP0_REG09__SAARI
:
7488 CP0_CHECK(ctx
->saar
);
7489 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7490 register_name
= "SAARI";
7492 case CP0_REG09__SAAR
:
7493 CP0_CHECK(ctx
->saar
);
7494 gen_helper_dmfc0_saar(arg
, cpu_env
);
7495 register_name
= "SAAR";
7498 goto cp0_unimplemented
;
7501 case CP0_REGISTER_10
:
7503 case CP0_REG10__ENTRYHI
:
7504 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7505 register_name
= "EntryHi";
7508 goto cp0_unimplemented
;
7511 case CP0_REGISTER_11
:
7513 case CP0_REG11__COMPARE
:
7514 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7515 register_name
= "Compare";
7517 /* 6,7 are implementation dependent */
7519 goto cp0_unimplemented
;
7522 case CP0_REGISTER_12
:
7524 case CP0_REG12__STATUS
:
7525 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7526 register_name
= "Status";
7528 case CP0_REG12__INTCTL
:
7529 check_insn(ctx
, ISA_MIPS_R2
);
7530 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7531 register_name
= "IntCtl";
7533 case CP0_REG12__SRSCTL
:
7534 check_insn(ctx
, ISA_MIPS_R2
);
7535 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7536 register_name
= "SRSCtl";
7538 case CP0_REG12__SRSMAP
:
7539 check_insn(ctx
, ISA_MIPS_R2
);
7540 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7541 register_name
= "SRSMap";
7544 goto cp0_unimplemented
;
7547 case CP0_REGISTER_13
:
7549 case CP0_REG13__CAUSE
:
7550 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7551 register_name
= "Cause";
7554 goto cp0_unimplemented
;
7557 case CP0_REGISTER_14
:
7559 case CP0_REG14__EPC
:
7560 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7561 register_name
= "EPC";
7564 goto cp0_unimplemented
;
7567 case CP0_REGISTER_15
:
7569 case CP0_REG15__PRID
:
7570 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7571 register_name
= "PRid";
7573 case CP0_REG15__EBASE
:
7574 check_insn(ctx
, ISA_MIPS_R2
);
7575 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7576 register_name
= "EBase";
7578 case CP0_REG15__CMGCRBASE
:
7579 check_insn(ctx
, ISA_MIPS_R2
);
7580 CP0_CHECK(ctx
->cmgcr
);
7581 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7582 register_name
= "CMGCRBase";
7585 goto cp0_unimplemented
;
7588 case CP0_REGISTER_16
:
7590 case CP0_REG16__CONFIG
:
7591 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7592 register_name
= "Config";
7594 case CP0_REG16__CONFIG1
:
7595 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7596 register_name
= "Config1";
7598 case CP0_REG16__CONFIG2
:
7599 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7600 register_name
= "Config2";
7602 case CP0_REG16__CONFIG3
:
7603 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7604 register_name
= "Config3";
7606 case CP0_REG16__CONFIG4
:
7607 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7608 register_name
= "Config4";
7610 case CP0_REG16__CONFIG5
:
7611 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7612 register_name
= "Config5";
7614 /* 6,7 are implementation dependent */
7615 case CP0_REG16__CONFIG6
:
7616 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7617 register_name
= "Config6";
7619 case CP0_REG16__CONFIG7
:
7620 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7621 register_name
= "Config7";
7624 goto cp0_unimplemented
;
7627 case CP0_REGISTER_17
:
7629 case CP0_REG17__LLADDR
:
7630 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
7631 register_name
= "LLAddr";
7633 case CP0_REG17__MAAR
:
7634 CP0_CHECK(ctx
->mrp
);
7635 gen_helper_dmfc0_maar(arg
, cpu_env
);
7636 register_name
= "MAAR";
7638 case CP0_REG17__MAARI
:
7639 CP0_CHECK(ctx
->mrp
);
7640 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7641 register_name
= "MAARI";
7644 goto cp0_unimplemented
;
7647 case CP0_REGISTER_18
:
7649 case CP0_REG18__WATCHLO0
:
7650 case CP0_REG18__WATCHLO1
:
7651 case CP0_REG18__WATCHLO2
:
7652 case CP0_REG18__WATCHLO3
:
7653 case CP0_REG18__WATCHLO4
:
7654 case CP0_REG18__WATCHLO5
:
7655 case CP0_REG18__WATCHLO6
:
7656 case CP0_REG18__WATCHLO7
:
7657 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7658 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
7659 register_name
= "WatchLo";
7662 goto cp0_unimplemented
;
7665 case CP0_REGISTER_19
:
7667 case CP0_REG19__WATCHHI0
:
7668 case CP0_REG19__WATCHHI1
:
7669 case CP0_REG19__WATCHHI2
:
7670 case CP0_REG19__WATCHHI3
:
7671 case CP0_REG19__WATCHHI4
:
7672 case CP0_REG19__WATCHHI5
:
7673 case CP0_REG19__WATCHHI6
:
7674 case CP0_REG19__WATCHHI7
:
7675 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7676 gen_helper_1e0i(dmfc0_watchhi
, arg
, sel
);
7677 register_name
= "WatchHi";
7680 goto cp0_unimplemented
;
7683 case CP0_REGISTER_20
:
7685 case CP0_REG20__XCONTEXT
:
7686 check_insn(ctx
, ISA_MIPS3
);
7687 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7688 register_name
= "XContext";
7691 goto cp0_unimplemented
;
7694 case CP0_REGISTER_21
:
7695 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7696 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
7699 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7700 register_name
= "Framemask";
7703 goto cp0_unimplemented
;
7706 case CP0_REGISTER_22
:
7707 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7708 register_name
= "'Diagnostic"; /* implementation dependent */
7710 case CP0_REGISTER_23
:
7712 case CP0_REG23__DEBUG
:
7713 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7714 register_name
= "Debug";
7716 case CP0_REG23__TRACECONTROL
:
7717 /* PDtrace support */
7718 /* gen_helper_dmfc0_tracecontrol(arg, cpu_env); */
7719 register_name
= "TraceControl";
7720 goto cp0_unimplemented
;
7721 case CP0_REG23__TRACECONTROL2
:
7722 /* PDtrace support */
7723 /* gen_helper_dmfc0_tracecontrol2(arg, cpu_env); */
7724 register_name
= "TraceControl2";
7725 goto cp0_unimplemented
;
7726 case CP0_REG23__USERTRACEDATA1
:
7727 /* PDtrace support */
7728 /* gen_helper_dmfc0_usertracedata1(arg, cpu_env);*/
7729 register_name
= "UserTraceData1";
7730 goto cp0_unimplemented
;
7731 case CP0_REG23__TRACEIBPC
:
7732 /* PDtrace support */
7733 /* gen_helper_dmfc0_traceibpc(arg, cpu_env); */
7734 register_name
= "TraceIBPC";
7735 goto cp0_unimplemented
;
7736 case CP0_REG23__TRACEDBPC
:
7737 /* PDtrace support */
7738 /* gen_helper_dmfc0_tracedbpc(arg, cpu_env); */
7739 register_name
= "TraceDBPC";
7740 goto cp0_unimplemented
;
7742 goto cp0_unimplemented
;
7745 case CP0_REGISTER_24
:
7747 case CP0_REG24__DEPC
:
7749 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7750 register_name
= "DEPC";
7753 goto cp0_unimplemented
;
7756 case CP0_REGISTER_25
:
7758 case CP0_REG25__PERFCTL0
:
7759 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7760 register_name
= "Performance0";
7762 case CP0_REG25__PERFCNT0
:
7763 /* gen_helper_dmfc0_performance1(arg); */
7764 register_name
= "Performance1";
7765 goto cp0_unimplemented
;
7766 case CP0_REG25__PERFCTL1
:
7767 /* gen_helper_dmfc0_performance2(arg); */
7768 register_name
= "Performance2";
7769 goto cp0_unimplemented
;
7770 case CP0_REG25__PERFCNT1
:
7771 /* gen_helper_dmfc0_performance3(arg); */
7772 register_name
= "Performance3";
7773 goto cp0_unimplemented
;
7774 case CP0_REG25__PERFCTL2
:
7775 /* gen_helper_dmfc0_performance4(arg); */
7776 register_name
= "Performance4";
7777 goto cp0_unimplemented
;
7778 case CP0_REG25__PERFCNT2
:
7779 /* gen_helper_dmfc0_performance5(arg); */
7780 register_name
= "Performance5";
7781 goto cp0_unimplemented
;
7782 case CP0_REG25__PERFCTL3
:
7783 /* gen_helper_dmfc0_performance6(arg); */
7784 register_name
= "Performance6";
7785 goto cp0_unimplemented
;
7786 case CP0_REG25__PERFCNT3
:
7787 /* gen_helper_dmfc0_performance7(arg); */
7788 register_name
= "Performance7";
7789 goto cp0_unimplemented
;
7791 goto cp0_unimplemented
;
7794 case CP0_REGISTER_26
:
7796 case CP0_REG26__ERRCTL
:
7797 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7798 register_name
= "ErrCtl";
7801 goto cp0_unimplemented
;
7804 case CP0_REGISTER_27
:
7807 case CP0_REG27__CACHERR
:
7808 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7809 register_name
= "CacheErr";
7812 goto cp0_unimplemented
;
7815 case CP0_REGISTER_28
:
7817 case CP0_REG28__TAGLO
:
7818 case CP0_REG28__TAGLO1
:
7819 case CP0_REG28__TAGLO2
:
7820 case CP0_REG28__TAGLO3
:
7821 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
7822 register_name
= "TagLo";
7824 case CP0_REG28__DATALO
:
7825 case CP0_REG28__DATALO1
:
7826 case CP0_REG28__DATALO2
:
7827 case CP0_REG28__DATALO3
:
7828 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7829 register_name
= "DataLo";
7832 goto cp0_unimplemented
;
7835 case CP0_REGISTER_29
:
7837 case CP0_REG29__TAGHI
:
7838 case CP0_REG29__TAGHI1
:
7839 case CP0_REG29__TAGHI2
:
7840 case CP0_REG29__TAGHI3
:
7841 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7842 register_name
= "TagHi";
7844 case CP0_REG29__DATAHI
:
7845 case CP0_REG29__DATAHI1
:
7846 case CP0_REG29__DATAHI2
:
7847 case CP0_REG29__DATAHI3
:
7848 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7849 register_name
= "DataHi";
7852 goto cp0_unimplemented
;
7855 case CP0_REGISTER_30
:
7857 case CP0_REG30__ERROREPC
:
7858 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7859 register_name
= "ErrorEPC";
7862 goto cp0_unimplemented
;
7865 case CP0_REGISTER_31
:
7867 case CP0_REG31__DESAVE
:
7869 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7870 register_name
= "DESAVE";
7872 case CP0_REG31__KSCRATCH1
:
7873 case CP0_REG31__KSCRATCH2
:
7874 case CP0_REG31__KSCRATCH3
:
7875 case CP0_REG31__KSCRATCH4
:
7876 case CP0_REG31__KSCRATCH5
:
7877 case CP0_REG31__KSCRATCH6
:
7878 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7879 tcg_gen_ld_tl(arg
, cpu_env
,
7880 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
7881 register_name
= "KScratch";
7884 goto cp0_unimplemented
;
7888 goto cp0_unimplemented
;
7890 trace_mips_translate_c0("dmfc0", register_name
, reg
, sel
);
7894 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n",
7895 register_name
, reg
, sel
);
7896 gen_mfc0_unimplemented(ctx
, arg
);
7899 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7901 const char *register_name
= "invalid";
7904 check_insn(ctx
, ISA_MIPS_R1
);
7907 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7912 case CP0_REGISTER_00
:
7914 case CP0_REG00__INDEX
:
7915 gen_helper_mtc0_index(cpu_env
, arg
);
7916 register_name
= "Index";
7918 case CP0_REG00__MVPCONTROL
:
7919 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7920 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7921 register_name
= "MVPControl";
7923 case CP0_REG00__MVPCONF0
:
7924 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7926 register_name
= "MVPConf0";
7928 case CP0_REG00__MVPCONF1
:
7929 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7931 register_name
= "MVPConf1";
7933 case CP0_REG00__VPCONTROL
:
7936 register_name
= "VPControl";
7939 goto cp0_unimplemented
;
7942 case CP0_REGISTER_01
:
7944 case CP0_REG01__RANDOM
:
7946 register_name
= "Random";
7948 case CP0_REG01__VPECONTROL
:
7949 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7950 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7951 register_name
= "VPEControl";
7953 case CP0_REG01__VPECONF0
:
7954 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7955 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7956 register_name
= "VPEConf0";
7958 case CP0_REG01__VPECONF1
:
7959 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7960 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7961 register_name
= "VPEConf1";
7963 case CP0_REG01__YQMASK
:
7964 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7965 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7966 register_name
= "YQMask";
7968 case CP0_REG01__VPESCHEDULE
:
7969 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7970 tcg_gen_st_tl(arg
, cpu_env
,
7971 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7972 register_name
= "VPESchedule";
7974 case CP0_REG01__VPESCHEFBACK
:
7975 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7976 tcg_gen_st_tl(arg
, cpu_env
,
7977 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7978 register_name
= "VPEScheFBack";
7980 case CP0_REG01__VPEOPT
:
7981 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7982 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7983 register_name
= "VPEOpt";
7986 goto cp0_unimplemented
;
7989 case CP0_REGISTER_02
:
7991 case CP0_REG02__ENTRYLO0
:
7992 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7993 register_name
= "EntryLo0";
7995 case CP0_REG02__TCSTATUS
:
7996 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7997 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7998 register_name
= "TCStatus";
8000 case CP0_REG02__TCBIND
:
8001 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8002 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8003 register_name
= "TCBind";
8005 case CP0_REG02__TCRESTART
:
8006 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8007 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8008 register_name
= "TCRestart";
8010 case CP0_REG02__TCHALT
:
8011 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8012 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8013 register_name
= "TCHalt";
8015 case CP0_REG02__TCCONTEXT
:
8016 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8017 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8018 register_name
= "TCContext";
8020 case CP0_REG02__TCSCHEDULE
:
8021 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8022 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8023 register_name
= "TCSchedule";
8025 case CP0_REG02__TCSCHEFBACK
:
8026 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8027 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8028 register_name
= "TCScheFBack";
8031 goto cp0_unimplemented
;
8034 case CP0_REGISTER_03
:
8036 case CP0_REG03__ENTRYLO1
:
8037 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8038 register_name
= "EntryLo1";
8040 case CP0_REG03__GLOBALNUM
:
8043 register_name
= "GlobalNumber";
8046 goto cp0_unimplemented
;
8049 case CP0_REGISTER_04
:
8051 case CP0_REG04__CONTEXT
:
8052 gen_helper_mtc0_context(cpu_env
, arg
);
8053 register_name
= "Context";
8055 case CP0_REG04__CONTEXTCONFIG
:
8057 /* gen_helper_dmtc0_contextconfig(arg); */
8058 register_name
= "ContextConfig";
8059 goto cp0_unimplemented
;
8060 case CP0_REG04__USERLOCAL
:
8061 CP0_CHECK(ctx
->ulri
);
8062 tcg_gen_st_tl(arg
, cpu_env
,
8063 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8064 register_name
= "UserLocal";
8066 case CP0_REG04__MMID
:
8068 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MemoryMapID
));
8069 register_name
= "MMID";
8072 goto cp0_unimplemented
;
8075 case CP0_REGISTER_05
:
8077 case CP0_REG05__PAGEMASK
:
8078 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8079 register_name
= "PageMask";
8081 case CP0_REG05__PAGEGRAIN
:
8082 check_insn(ctx
, ISA_MIPS_R2
);
8083 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8084 register_name
= "PageGrain";
8086 case CP0_REG05__SEGCTL0
:
8088 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8089 register_name
= "SegCtl0";
8091 case CP0_REG05__SEGCTL1
:
8093 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8094 register_name
= "SegCtl1";
8096 case CP0_REG05__SEGCTL2
:
8098 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8099 register_name
= "SegCtl2";
8101 case CP0_REG05__PWBASE
:
8103 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8104 register_name
= "PWBase";
8106 case CP0_REG05__PWFIELD
:
8108 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8109 register_name
= "PWField";
8111 case CP0_REG05__PWSIZE
:
8113 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8114 register_name
= "PWSize";
8117 goto cp0_unimplemented
;
8120 case CP0_REGISTER_06
:
8122 case CP0_REG06__WIRED
:
8123 gen_helper_mtc0_wired(cpu_env
, arg
);
8124 register_name
= "Wired";
8126 case CP0_REG06__SRSCONF0
:
8127 check_insn(ctx
, ISA_MIPS_R2
);
8128 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8129 register_name
= "SRSConf0";
8131 case CP0_REG06__SRSCONF1
:
8132 check_insn(ctx
, ISA_MIPS_R2
);
8133 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8134 register_name
= "SRSConf1";
8136 case CP0_REG06__SRSCONF2
:
8137 check_insn(ctx
, ISA_MIPS_R2
);
8138 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8139 register_name
= "SRSConf2";
8141 case CP0_REG06__SRSCONF3
:
8142 check_insn(ctx
, ISA_MIPS_R2
);
8143 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8144 register_name
= "SRSConf3";
8146 case CP0_REG06__SRSCONF4
:
8147 check_insn(ctx
, ISA_MIPS_R2
);
8148 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8149 register_name
= "SRSConf4";
8151 case CP0_REG06__PWCTL
:
8153 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8154 register_name
= "PWCtl";
8157 goto cp0_unimplemented
;
8160 case CP0_REGISTER_07
:
8162 case CP0_REG07__HWRENA
:
8163 check_insn(ctx
, ISA_MIPS_R2
);
8164 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8165 ctx
->base
.is_jmp
= DISAS_STOP
;
8166 register_name
= "HWREna";
8169 goto cp0_unimplemented
;
8172 case CP0_REGISTER_08
:
8174 case CP0_REG08__BADVADDR
:
8176 register_name
= "BadVAddr";
8178 case CP0_REG08__BADINSTR
:
8180 register_name
= "BadInstr";
8182 case CP0_REG08__BADINSTRP
:
8184 register_name
= "BadInstrP";
8186 case CP0_REG08__BADINSTRX
:
8188 register_name
= "BadInstrX";
8191 goto cp0_unimplemented
;
8194 case CP0_REGISTER_09
:
8196 case CP0_REG09__COUNT
:
8197 gen_helper_mtc0_count(cpu_env
, arg
);
8198 register_name
= "Count";
8200 case CP0_REG09__SAARI
:
8201 CP0_CHECK(ctx
->saar
);
8202 gen_helper_mtc0_saari(cpu_env
, arg
);
8203 register_name
= "SAARI";
8205 case CP0_REG09__SAAR
:
8206 CP0_CHECK(ctx
->saar
);
8207 gen_helper_mtc0_saar(cpu_env
, arg
);
8208 register_name
= "SAAR";
8211 goto cp0_unimplemented
;
8213 /* Stop translation as we may have switched the execution mode */
8214 ctx
->base
.is_jmp
= DISAS_STOP
;
8216 case CP0_REGISTER_10
:
8218 case CP0_REG10__ENTRYHI
:
8219 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8220 register_name
= "EntryHi";
8223 goto cp0_unimplemented
;
8226 case CP0_REGISTER_11
:
8228 case CP0_REG11__COMPARE
:
8229 gen_helper_mtc0_compare(cpu_env
, arg
);
8230 register_name
= "Compare";
8232 /* 6,7 are implementation dependent */
8234 goto cp0_unimplemented
;
8236 /* Stop translation as we may have switched the execution mode */
8237 ctx
->base
.is_jmp
= DISAS_STOP
;
8239 case CP0_REGISTER_12
:
8241 case CP0_REG12__STATUS
:
8242 save_cpu_state(ctx
, 1);
8243 gen_helper_mtc0_status(cpu_env
, arg
);
8244 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8245 gen_save_pc(ctx
->base
.pc_next
+ 4);
8246 ctx
->base
.is_jmp
= DISAS_EXIT
;
8247 register_name
= "Status";
8249 case CP0_REG12__INTCTL
:
8250 check_insn(ctx
, ISA_MIPS_R2
);
8251 gen_helper_mtc0_intctl(cpu_env
, arg
);
8252 /* Stop translation as we may have switched the execution mode */
8253 ctx
->base
.is_jmp
= DISAS_STOP
;
8254 register_name
= "IntCtl";
8256 case CP0_REG12__SRSCTL
:
8257 check_insn(ctx
, ISA_MIPS_R2
);
8258 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8259 /* Stop translation as we may have switched the execution mode */
8260 ctx
->base
.is_jmp
= DISAS_STOP
;
8261 register_name
= "SRSCtl";
8263 case CP0_REG12__SRSMAP
:
8264 check_insn(ctx
, ISA_MIPS_R2
);
8265 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8266 /* Stop translation as we may have switched the execution mode */
8267 ctx
->base
.is_jmp
= DISAS_STOP
;
8268 register_name
= "SRSMap";
8271 goto cp0_unimplemented
;
8274 case CP0_REGISTER_13
:
8276 case CP0_REG13__CAUSE
:
8277 save_cpu_state(ctx
, 1);
8278 gen_helper_mtc0_cause(cpu_env
, arg
);
8280 * Stop translation as we may have triggered an interrupt.
8281 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8282 * translated code to check for pending interrupts.
8284 gen_save_pc(ctx
->base
.pc_next
+ 4);
8285 ctx
->base
.is_jmp
= DISAS_EXIT
;
8286 register_name
= "Cause";
8289 goto cp0_unimplemented
;
8292 case CP0_REGISTER_14
:
8294 case CP0_REG14__EPC
:
8295 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8296 register_name
= "EPC";
8299 goto cp0_unimplemented
;
8302 case CP0_REGISTER_15
:
8304 case CP0_REG15__PRID
:
8306 register_name
= "PRid";
8308 case CP0_REG15__EBASE
:
8309 check_insn(ctx
, ISA_MIPS_R2
);
8310 gen_helper_mtc0_ebase(cpu_env
, arg
);
8311 register_name
= "EBase";
8314 goto cp0_unimplemented
;
8317 case CP0_REGISTER_16
:
8319 case CP0_REG16__CONFIG
:
8320 gen_helper_mtc0_config0(cpu_env
, arg
);
8321 register_name
= "Config";
8322 /* Stop translation as we may have switched the execution mode */
8323 ctx
->base
.is_jmp
= DISAS_STOP
;
8325 case CP0_REG16__CONFIG1
:
8326 /* ignored, read only */
8327 register_name
= "Config1";
8329 case CP0_REG16__CONFIG2
:
8330 gen_helper_mtc0_config2(cpu_env
, arg
);
8331 register_name
= "Config2";
8332 /* Stop translation as we may have switched the execution mode */
8333 ctx
->base
.is_jmp
= DISAS_STOP
;
8335 case CP0_REG16__CONFIG3
:
8336 gen_helper_mtc0_config3(cpu_env
, arg
);
8337 register_name
= "Config3";
8338 /* Stop translation as we may have switched the execution mode */
8339 ctx
->base
.is_jmp
= DISAS_STOP
;
8341 case CP0_REG16__CONFIG4
:
8342 /* currently ignored */
8343 register_name
= "Config4";
8345 case CP0_REG16__CONFIG5
:
8346 gen_helper_mtc0_config5(cpu_env
, arg
);
8347 register_name
= "Config5";
8348 /* Stop translation as we may have switched the execution mode */
8349 ctx
->base
.is_jmp
= DISAS_STOP
;
8351 /* 6,7 are implementation dependent */
8353 register_name
= "Invalid config selector";
8354 goto cp0_unimplemented
;
8357 case CP0_REGISTER_17
:
8359 case CP0_REG17__LLADDR
:
8360 gen_helper_mtc0_lladdr(cpu_env
, arg
);
8361 register_name
= "LLAddr";
8363 case CP0_REG17__MAAR
:
8364 CP0_CHECK(ctx
->mrp
);
8365 gen_helper_mtc0_maar(cpu_env
, arg
);
8366 register_name
= "MAAR";
8368 case CP0_REG17__MAARI
:
8369 CP0_CHECK(ctx
->mrp
);
8370 gen_helper_mtc0_maari(cpu_env
, arg
);
8371 register_name
= "MAARI";
8374 goto cp0_unimplemented
;
8377 case CP0_REGISTER_18
:
8379 case CP0_REG18__WATCHLO0
:
8380 case CP0_REG18__WATCHLO1
:
8381 case CP0_REG18__WATCHLO2
:
8382 case CP0_REG18__WATCHLO3
:
8383 case CP0_REG18__WATCHLO4
:
8384 case CP0_REG18__WATCHLO5
:
8385 case CP0_REG18__WATCHLO6
:
8386 case CP0_REG18__WATCHLO7
:
8387 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8388 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
8389 register_name
= "WatchLo";
8392 goto cp0_unimplemented
;
8395 case CP0_REGISTER_19
:
8397 case CP0_REG19__WATCHHI0
:
8398 case CP0_REG19__WATCHHI1
:
8399 case CP0_REG19__WATCHHI2
:
8400 case CP0_REG19__WATCHHI3
:
8401 case CP0_REG19__WATCHHI4
:
8402 case CP0_REG19__WATCHHI5
:
8403 case CP0_REG19__WATCHHI6
:
8404 case CP0_REG19__WATCHHI7
:
8405 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8406 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8407 register_name
= "WatchHi";
8410 goto cp0_unimplemented
;
8413 case CP0_REGISTER_20
:
8415 case CP0_REG20__XCONTEXT
:
8416 check_insn(ctx
, ISA_MIPS3
);
8417 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8418 register_name
= "XContext";
8421 goto cp0_unimplemented
;
8424 case CP0_REGISTER_21
:
8425 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8426 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
8429 gen_helper_mtc0_framemask(cpu_env
, arg
);
8430 register_name
= "Framemask";
8433 goto cp0_unimplemented
;
8436 case CP0_REGISTER_22
:
8438 register_name
= "Diagnostic"; /* implementation dependent */
8440 case CP0_REGISTER_23
:
8442 case CP0_REG23__DEBUG
:
8443 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8444 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8445 gen_save_pc(ctx
->base
.pc_next
+ 4);
8446 ctx
->base
.is_jmp
= DISAS_EXIT
;
8447 register_name
= "Debug";
8449 case CP0_REG23__TRACECONTROL
:
8450 /* PDtrace support */
8451 /* gen_helper_mtc0_tracecontrol(cpu_env, arg); */
8452 /* Stop translation as we may have switched the execution mode */
8453 ctx
->base
.is_jmp
= DISAS_STOP
;
8454 register_name
= "TraceControl";
8455 goto cp0_unimplemented
;
8456 case CP0_REG23__TRACECONTROL2
:
8457 /* PDtrace support */
8458 /* gen_helper_mtc0_tracecontrol2(cpu_env, arg); */
8459 /* Stop translation as we may have switched the execution mode */
8460 ctx
->base
.is_jmp
= DISAS_STOP
;
8461 register_name
= "TraceControl2";
8462 goto cp0_unimplemented
;
8463 case CP0_REG23__USERTRACEDATA1
:
8464 /* PDtrace support */
8465 /* gen_helper_mtc0_usertracedata1(cpu_env, arg);*/
8466 /* Stop translation as we may have switched the execution mode */
8467 ctx
->base
.is_jmp
= DISAS_STOP
;
8468 register_name
= "UserTraceData1";
8469 goto cp0_unimplemented
;
8470 case CP0_REG23__TRACEIBPC
:
8471 /* PDtrace support */
8472 /* gen_helper_mtc0_traceibpc(cpu_env, arg); */
8473 /* Stop translation as we may have switched the execution mode */
8474 ctx
->base
.is_jmp
= DISAS_STOP
;
8475 register_name
= "TraceIBPC";
8476 goto cp0_unimplemented
;
8477 case CP0_REG23__TRACEDBPC
:
8478 /* PDtrace support */
8479 /* gen_helper_mtc0_tracedbpc(cpu_env, arg); */
8480 /* Stop translation as we may have switched the execution mode */
8481 ctx
->base
.is_jmp
= DISAS_STOP
;
8482 register_name
= "TraceDBPC";
8483 goto cp0_unimplemented
;
8485 goto cp0_unimplemented
;
8488 case CP0_REGISTER_24
:
8490 case CP0_REG24__DEPC
:
8492 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8493 register_name
= "DEPC";
8496 goto cp0_unimplemented
;
8499 case CP0_REGISTER_25
:
8501 case CP0_REG25__PERFCTL0
:
8502 gen_helper_mtc0_performance0(cpu_env
, arg
);
8503 register_name
= "Performance0";
8505 case CP0_REG25__PERFCNT0
:
8506 /* gen_helper_mtc0_performance1(cpu_env, arg); */
8507 register_name
= "Performance1";
8508 goto cp0_unimplemented
;
8509 case CP0_REG25__PERFCTL1
:
8510 /* gen_helper_mtc0_performance2(cpu_env, arg); */
8511 register_name
= "Performance2";
8512 goto cp0_unimplemented
;
8513 case CP0_REG25__PERFCNT1
:
8514 /* gen_helper_mtc0_performance3(cpu_env, arg); */
8515 register_name
= "Performance3";
8516 goto cp0_unimplemented
;
8517 case CP0_REG25__PERFCTL2
:
8518 /* gen_helper_mtc0_performance4(cpu_env, arg); */
8519 register_name
= "Performance4";
8520 goto cp0_unimplemented
;
8521 case CP0_REG25__PERFCNT2
:
8522 /* gen_helper_mtc0_performance5(cpu_env, arg); */
8523 register_name
= "Performance5";
8524 goto cp0_unimplemented
;
8525 case CP0_REG25__PERFCTL3
:
8526 /* gen_helper_mtc0_performance6(cpu_env, arg); */
8527 register_name
= "Performance6";
8528 goto cp0_unimplemented
;
8529 case CP0_REG25__PERFCNT3
:
8530 /* gen_helper_mtc0_performance7(cpu_env, arg); */
8531 register_name
= "Performance7";
8532 goto cp0_unimplemented
;
8534 goto cp0_unimplemented
;
8537 case CP0_REGISTER_26
:
8539 case CP0_REG26__ERRCTL
:
8540 gen_helper_mtc0_errctl(cpu_env
, arg
);
8541 ctx
->base
.is_jmp
= DISAS_STOP
;
8542 register_name
= "ErrCtl";
8545 goto cp0_unimplemented
;
8548 case CP0_REGISTER_27
:
8550 case CP0_REG27__CACHERR
:
8552 register_name
= "CacheErr";
8555 goto cp0_unimplemented
;
8558 case CP0_REGISTER_28
:
8560 case CP0_REG28__TAGLO
:
8561 case CP0_REG28__TAGLO1
:
8562 case CP0_REG28__TAGLO2
:
8563 case CP0_REG28__TAGLO3
:
8564 gen_helper_mtc0_taglo(cpu_env
, arg
);
8565 register_name
= "TagLo";
8567 case CP0_REG28__DATALO
:
8568 case CP0_REG28__DATALO1
:
8569 case CP0_REG28__DATALO2
:
8570 case CP0_REG28__DATALO3
:
8571 gen_helper_mtc0_datalo(cpu_env
, arg
);
8572 register_name
= "DataLo";
8575 goto cp0_unimplemented
;
8578 case CP0_REGISTER_29
:
8580 case CP0_REG29__TAGHI
:
8581 case CP0_REG29__TAGHI1
:
8582 case CP0_REG29__TAGHI2
:
8583 case CP0_REG29__TAGHI3
:
8584 gen_helper_mtc0_taghi(cpu_env
, arg
);
8585 register_name
= "TagHi";
8587 case CP0_REG29__DATAHI
:
8588 case CP0_REG29__DATAHI1
:
8589 case CP0_REG29__DATAHI2
:
8590 case CP0_REG29__DATAHI3
:
8591 gen_helper_mtc0_datahi(cpu_env
, arg
);
8592 register_name
= "DataHi";
8595 register_name
= "invalid sel";
8596 goto cp0_unimplemented
;
8599 case CP0_REGISTER_30
:
8601 case CP0_REG30__ERROREPC
:
8602 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8603 register_name
= "ErrorEPC";
8606 goto cp0_unimplemented
;
8609 case CP0_REGISTER_31
:
8611 case CP0_REG31__DESAVE
:
8613 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8614 register_name
= "DESAVE";
8616 case CP0_REG31__KSCRATCH1
:
8617 case CP0_REG31__KSCRATCH2
:
8618 case CP0_REG31__KSCRATCH3
:
8619 case CP0_REG31__KSCRATCH4
:
8620 case CP0_REG31__KSCRATCH5
:
8621 case CP0_REG31__KSCRATCH6
:
8622 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8623 tcg_gen_st_tl(arg
, cpu_env
,
8624 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
8625 register_name
= "KScratch";
8628 goto cp0_unimplemented
;
8632 goto cp0_unimplemented
;
8634 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
8636 /* For simplicity assume that all writes can cause interrupts. */
8637 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8639 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8640 * translated code to check for pending interrupts.
8642 gen_save_pc(ctx
->base
.pc_next
+ 4);
8643 ctx
->base
.is_jmp
= DISAS_EXIT
;
8648 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
8649 register_name
, reg
, sel
);
8651 #endif /* TARGET_MIPS64 */
8653 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
8654 int u
, int sel
, int h
)
8656 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8657 TCGv t0
= tcg_temp_local_new();
8659 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8660 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8661 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
8662 tcg_gen_movi_tl(t0
, -1);
8663 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8664 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
8665 tcg_gen_movi_tl(t0
, -1);
8666 } else if (u
== 0) {
8671 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
8674 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
8684 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
8687 gen_helper_mftc0_tcbind(t0
, cpu_env
);
8690 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
8693 gen_helper_mftc0_tchalt(t0
, cpu_env
);
8696 gen_helper_mftc0_tccontext(t0
, cpu_env
);
8699 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
8702 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
8705 gen_mfc0(ctx
, t0
, rt
, sel
);
8712 gen_helper_mftc0_entryhi(t0
, cpu_env
);
8715 gen_mfc0(ctx
, t0
, rt
, sel
);
8722 gen_helper_mftc0_status(t0
, cpu_env
);
8725 gen_mfc0(ctx
, t0
, rt
, sel
);
8732 gen_helper_mftc0_cause(t0
, cpu_env
);
8742 gen_helper_mftc0_epc(t0
, cpu_env
);
8752 gen_helper_mftc0_ebase(t0
, cpu_env
);
8769 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
8779 gen_helper_mftc0_debug(t0
, cpu_env
);
8782 gen_mfc0(ctx
, t0
, rt
, sel
);
8787 gen_mfc0(ctx
, t0
, rt
, sel
);
8791 /* GPR registers. */
8793 gen_helper_1e0i(mftgpr
, t0
, rt
);
8795 /* Auxiliary CPU registers */
8799 gen_helper_1e0i(mftlo
, t0
, 0);
8802 gen_helper_1e0i(mfthi
, t0
, 0);
8805 gen_helper_1e0i(mftacx
, t0
, 0);
8808 gen_helper_1e0i(mftlo
, t0
, 1);
8811 gen_helper_1e0i(mfthi
, t0
, 1);
8814 gen_helper_1e0i(mftacx
, t0
, 1);
8817 gen_helper_1e0i(mftlo
, t0
, 2);
8820 gen_helper_1e0i(mfthi
, t0
, 2);
8823 gen_helper_1e0i(mftacx
, t0
, 2);
8826 gen_helper_1e0i(mftlo
, t0
, 3);
8829 gen_helper_1e0i(mfthi
, t0
, 3);
8832 gen_helper_1e0i(mftacx
, t0
, 3);
8835 gen_helper_mftdsp(t0
, cpu_env
);
8841 /* Floating point (COP1). */
8843 /* XXX: For now we support only a single FPU context. */
8845 TCGv_i32 fp0
= tcg_temp_new_i32();
8847 gen_load_fpr32(ctx
, fp0
, rt
);
8848 tcg_gen_ext_i32_tl(t0
, fp0
);
8849 tcg_temp_free_i32(fp0
);
8851 TCGv_i32 fp0
= tcg_temp_new_i32();
8853 gen_load_fpr32h(ctx
, fp0
, rt
);
8854 tcg_gen_ext_i32_tl(t0
, fp0
);
8855 tcg_temp_free_i32(fp0
);
8859 /* XXX: For now we support only a single FPU context. */
8860 gen_helper_1e0i(cfc1
, t0
, rt
);
8862 /* COP2: Not implemented. */
8870 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
8871 gen_store_gpr(t0
, rd
);
8877 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
8878 gen_reserved_instruction(ctx
);
8881 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
8882 int u
, int sel
, int h
)
8884 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8885 TCGv t0
= tcg_temp_local_new();
8887 gen_load_gpr(t0
, rt
);
8888 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8889 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8890 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
8893 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8894 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
8897 } else if (u
== 0) {
8902 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
8905 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
8915 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
8918 gen_helper_mttc0_tcbind(cpu_env
, t0
);
8921 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
8924 gen_helper_mttc0_tchalt(cpu_env
, t0
);
8927 gen_helper_mttc0_tccontext(cpu_env
, t0
);
8930 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8933 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8936 gen_mtc0(ctx
, t0
, rd
, sel
);
8943 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8946 gen_mtc0(ctx
, t0
, rd
, sel
);
8953 gen_helper_mttc0_status(cpu_env
, t0
);
8956 gen_mtc0(ctx
, t0
, rd
, sel
);
8963 gen_helper_mttc0_cause(cpu_env
, t0
);
8973 gen_helper_mttc0_ebase(cpu_env
, t0
);
8983 gen_helper_mttc0_debug(cpu_env
, t0
);
8986 gen_mtc0(ctx
, t0
, rd
, sel
);
8991 gen_mtc0(ctx
, t0
, rd
, sel
);
8995 /* GPR registers. */
8997 gen_helper_0e1i(mttgpr
, t0
, rd
);
8999 /* Auxiliary CPU registers */
9003 gen_helper_0e1i(mttlo
, t0
, 0);
9006 gen_helper_0e1i(mtthi
, t0
, 0);
9009 gen_helper_0e1i(mttacx
, t0
, 0);
9012 gen_helper_0e1i(mttlo
, t0
, 1);
9015 gen_helper_0e1i(mtthi
, t0
, 1);
9018 gen_helper_0e1i(mttacx
, t0
, 1);
9021 gen_helper_0e1i(mttlo
, t0
, 2);
9024 gen_helper_0e1i(mtthi
, t0
, 2);
9027 gen_helper_0e1i(mttacx
, t0
, 2);
9030 gen_helper_0e1i(mttlo
, t0
, 3);
9033 gen_helper_0e1i(mtthi
, t0
, 3);
9036 gen_helper_0e1i(mttacx
, t0
, 3);
9039 gen_helper_mttdsp(cpu_env
, t0
);
9045 /* Floating point (COP1). */
9047 /* XXX: For now we support only a single FPU context. */
9049 TCGv_i32 fp0
= tcg_temp_new_i32();
9051 tcg_gen_trunc_tl_i32(fp0
, t0
);
9052 gen_store_fpr32(ctx
, fp0
, rd
);
9053 tcg_temp_free_i32(fp0
);
9055 TCGv_i32 fp0
= tcg_temp_new_i32();
9057 tcg_gen_trunc_tl_i32(fp0
, t0
);
9058 gen_store_fpr32h(ctx
, fp0
, rd
);
9059 tcg_temp_free_i32(fp0
);
9063 /* XXX: For now we support only a single FPU context. */
9064 gen_helper_0e2i(ctc1
, t0
, tcg_constant_i32(rd
), rt
);
9065 /* Stop translation as we may have changed hflags */
9066 ctx
->base
.is_jmp
= DISAS_STOP
;
9068 /* COP2: Not implemented. */
9076 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9082 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9083 gen_reserved_instruction(ctx
);
9086 static void gen_cp0(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
9089 const char *opn
= "ldst";
9091 check_cp0_enabled(ctx
);
9098 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9103 TCGv t0
= tcg_temp_new();
9105 gen_load_gpr(t0
, rt
);
9106 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9111 #if defined(TARGET_MIPS64)
9113 check_insn(ctx
, ISA_MIPS3
);
9118 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9122 check_insn(ctx
, ISA_MIPS3
);
9124 TCGv t0
= tcg_temp_new();
9126 gen_load_gpr(t0
, rt
);
9127 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9139 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9145 TCGv t0
= tcg_temp_new();
9146 gen_load_gpr(t0
, rt
);
9147 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9153 check_cp0_enabled(ctx
);
9158 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
9159 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9163 check_cp0_enabled(ctx
);
9164 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
9165 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9170 if (!env
->tlb
->helper_tlbwi
) {
9173 gen_helper_tlbwi(cpu_env
);
9178 if (!env
->tlb
->helper_tlbinv
) {
9181 gen_helper_tlbinv(cpu_env
);
9182 } /* treat as nop if TLBINV not supported */
9187 if (!env
->tlb
->helper_tlbinvf
) {
9190 gen_helper_tlbinvf(cpu_env
);
9191 } /* treat as nop if TLBINV not supported */
9195 if (!env
->tlb
->helper_tlbwr
) {
9198 gen_helper_tlbwr(cpu_env
);
9202 if (!env
->tlb
->helper_tlbp
) {
9205 gen_helper_tlbp(cpu_env
);
9209 if (!env
->tlb
->helper_tlbr
) {
9212 gen_helper_tlbr(cpu_env
);
9214 case OPC_ERET
: /* OPC_ERETNC */
9215 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9216 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9219 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
9220 if (ctx
->opcode
& (1 << bit_shift
)) {
9223 check_insn(ctx
, ISA_MIPS_R5
);
9224 gen_helper_eretnc(cpu_env
);
9228 check_insn(ctx
, ISA_MIPS2
);
9229 gen_helper_eret(cpu_env
);
9231 ctx
->base
.is_jmp
= DISAS_EXIT
;
9236 check_insn(ctx
, ISA_MIPS_R1
);
9237 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9238 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9241 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9243 gen_reserved_instruction(ctx
);
9245 gen_helper_deret(cpu_env
);
9246 ctx
->base
.is_jmp
= DISAS_EXIT
;
9251 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS_R1
);
9252 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9253 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9256 /* If we get an exception, we want to restart at next instruction */
9257 ctx
->base
.pc_next
+= 4;
9258 save_cpu_state(ctx
, 1);
9259 ctx
->base
.pc_next
-= 4;
9260 gen_helper_wait(cpu_env
);
9261 ctx
->base
.is_jmp
= DISAS_NORETURN
;
9266 gen_reserved_instruction(ctx
);
9269 (void)opn
; /* avoid a compiler warning */
9271 #endif /* !CONFIG_USER_ONLY */
9273 /* CP1 Branches (before delay slot) */
9274 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
9275 int32_t cc
, int32_t offset
)
9277 target_ulong btarget
;
9278 TCGv_i32 t0
= tcg_temp_new_i32();
9280 if ((ctx
->insn_flags
& ISA_MIPS_R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9281 gen_reserved_instruction(ctx
);
9286 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
9289 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
9293 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9294 tcg_gen_not_i32(t0
, t0
);
9295 tcg_gen_andi_i32(t0
, t0
, 1);
9296 tcg_gen_extu_i32_tl(bcond
, t0
);
9299 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9300 tcg_gen_not_i32(t0
, t0
);
9301 tcg_gen_andi_i32(t0
, t0
, 1);
9302 tcg_gen_extu_i32_tl(bcond
, t0
);
9305 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9306 tcg_gen_andi_i32(t0
, t0
, 1);
9307 tcg_gen_extu_i32_tl(bcond
, t0
);
9310 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9311 tcg_gen_andi_i32(t0
, t0
, 1);
9312 tcg_gen_extu_i32_tl(bcond
, t0
);
9314 ctx
->hflags
|= MIPS_HFLAG_BL
;
9318 TCGv_i32 t1
= tcg_temp_new_i32();
9319 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9320 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9321 tcg_gen_nand_i32(t0
, t0
, t1
);
9322 tcg_temp_free_i32(t1
);
9323 tcg_gen_andi_i32(t0
, t0
, 1);
9324 tcg_gen_extu_i32_tl(bcond
, t0
);
9329 TCGv_i32 t1
= tcg_temp_new_i32();
9330 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9331 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9332 tcg_gen_or_i32(t0
, t0
, t1
);
9333 tcg_temp_free_i32(t1
);
9334 tcg_gen_andi_i32(t0
, t0
, 1);
9335 tcg_gen_extu_i32_tl(bcond
, t0
);
9340 TCGv_i32 t1
= tcg_temp_new_i32();
9341 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9342 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9343 tcg_gen_and_i32(t0
, t0
, t1
);
9344 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
9345 tcg_gen_and_i32(t0
, t0
, t1
);
9346 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
9347 tcg_gen_nand_i32(t0
, t0
, t1
);
9348 tcg_temp_free_i32(t1
);
9349 tcg_gen_andi_i32(t0
, t0
, 1);
9350 tcg_gen_extu_i32_tl(bcond
, t0
);
9355 TCGv_i32 t1
= tcg_temp_new_i32();
9356 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9357 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9358 tcg_gen_or_i32(t0
, t0
, t1
);
9359 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
9360 tcg_gen_or_i32(t0
, t0
, t1
);
9361 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
9362 tcg_gen_or_i32(t0
, t0
, t1
);
9363 tcg_temp_free_i32(t1
);
9364 tcg_gen_andi_i32(t0
, t0
, 1);
9365 tcg_gen_extu_i32_tl(bcond
, t0
);
9368 ctx
->hflags
|= MIPS_HFLAG_BC
;
9371 MIPS_INVAL("cp1 cond branch");
9372 gen_reserved_instruction(ctx
);
9375 ctx
->btarget
= btarget
;
9376 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
9378 tcg_temp_free_i32(t0
);
9381 /* R6 CP1 Branches */
9382 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
9383 int32_t ft
, int32_t offset
,
9386 target_ulong btarget
;
9387 TCGv_i64 t0
= tcg_temp_new_i64();
9389 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
9390 #ifdef MIPS_DEBUG_DISAS
9391 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
9392 "\n", ctx
->base
.pc_next
);
9394 gen_reserved_instruction(ctx
);
9398 gen_load_fpr64(ctx
, t0
, ft
);
9399 tcg_gen_andi_i64(t0
, t0
, 1);
9401 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
9405 tcg_gen_xori_i64(t0
, t0
, 1);
9406 ctx
->hflags
|= MIPS_HFLAG_BC
;
9409 /* t0 already set */
9410 ctx
->hflags
|= MIPS_HFLAG_BC
;
9413 MIPS_INVAL("cp1 cond branch");
9414 gen_reserved_instruction(ctx
);
9418 tcg_gen_trunc_i64_tl(bcond
, t0
);
9420 ctx
->btarget
= btarget
;
9422 switch (delayslot_size
) {
9424 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
9427 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
9432 tcg_temp_free_i64(t0
);
9435 /* Coprocessor 1 (FPU) */
9437 #define FOP(func, fmt) (((fmt) << 21) | (func))
9440 OPC_ADD_S
= FOP(0, FMT_S
),
9441 OPC_SUB_S
= FOP(1, FMT_S
),
9442 OPC_MUL_S
= FOP(2, FMT_S
),
9443 OPC_DIV_S
= FOP(3, FMT_S
),
9444 OPC_SQRT_S
= FOP(4, FMT_S
),
9445 OPC_ABS_S
= FOP(5, FMT_S
),
9446 OPC_MOV_S
= FOP(6, FMT_S
),
9447 OPC_NEG_S
= FOP(7, FMT_S
),
9448 OPC_ROUND_L_S
= FOP(8, FMT_S
),
9449 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
9450 OPC_CEIL_L_S
= FOP(10, FMT_S
),
9451 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
9452 OPC_ROUND_W_S
= FOP(12, FMT_S
),
9453 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
9454 OPC_CEIL_W_S
= FOP(14, FMT_S
),
9455 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
9456 OPC_SEL_S
= FOP(16, FMT_S
),
9457 OPC_MOVCF_S
= FOP(17, FMT_S
),
9458 OPC_MOVZ_S
= FOP(18, FMT_S
),
9459 OPC_MOVN_S
= FOP(19, FMT_S
),
9460 OPC_SELEQZ_S
= FOP(20, FMT_S
),
9461 OPC_RECIP_S
= FOP(21, FMT_S
),
9462 OPC_RSQRT_S
= FOP(22, FMT_S
),
9463 OPC_SELNEZ_S
= FOP(23, FMT_S
),
9464 OPC_MADDF_S
= FOP(24, FMT_S
),
9465 OPC_MSUBF_S
= FOP(25, FMT_S
),
9466 OPC_RINT_S
= FOP(26, FMT_S
),
9467 OPC_CLASS_S
= FOP(27, FMT_S
),
9468 OPC_MIN_S
= FOP(28, FMT_S
),
9469 OPC_RECIP2_S
= FOP(28, FMT_S
),
9470 OPC_MINA_S
= FOP(29, FMT_S
),
9471 OPC_RECIP1_S
= FOP(29, FMT_S
),
9472 OPC_MAX_S
= FOP(30, FMT_S
),
9473 OPC_RSQRT1_S
= FOP(30, FMT_S
),
9474 OPC_MAXA_S
= FOP(31, FMT_S
),
9475 OPC_RSQRT2_S
= FOP(31, FMT_S
),
9476 OPC_CVT_D_S
= FOP(33, FMT_S
),
9477 OPC_CVT_W_S
= FOP(36, FMT_S
),
9478 OPC_CVT_L_S
= FOP(37, FMT_S
),
9479 OPC_CVT_PS_S
= FOP(38, FMT_S
),
9480 OPC_CMP_F_S
= FOP(48, FMT_S
),
9481 OPC_CMP_UN_S
= FOP(49, FMT_S
),
9482 OPC_CMP_EQ_S
= FOP(50, FMT_S
),
9483 OPC_CMP_UEQ_S
= FOP(51, FMT_S
),
9484 OPC_CMP_OLT_S
= FOP(52, FMT_S
),
9485 OPC_CMP_ULT_S
= FOP(53, FMT_S
),
9486 OPC_CMP_OLE_S
= FOP(54, FMT_S
),
9487 OPC_CMP_ULE_S
= FOP(55, FMT_S
),
9488 OPC_CMP_SF_S
= FOP(56, FMT_S
),
9489 OPC_CMP_NGLE_S
= FOP(57, FMT_S
),
9490 OPC_CMP_SEQ_S
= FOP(58, FMT_S
),
9491 OPC_CMP_NGL_S
= FOP(59, FMT_S
),
9492 OPC_CMP_LT_S
= FOP(60, FMT_S
),
9493 OPC_CMP_NGE_S
= FOP(61, FMT_S
),
9494 OPC_CMP_LE_S
= FOP(62, FMT_S
),
9495 OPC_CMP_NGT_S
= FOP(63, FMT_S
),
9497 OPC_ADD_D
= FOP(0, FMT_D
),
9498 OPC_SUB_D
= FOP(1, FMT_D
),
9499 OPC_MUL_D
= FOP(2, FMT_D
),
9500 OPC_DIV_D
= FOP(3, FMT_D
),
9501 OPC_SQRT_D
= FOP(4, FMT_D
),
9502 OPC_ABS_D
= FOP(5, FMT_D
),
9503 OPC_MOV_D
= FOP(6, FMT_D
),
9504 OPC_NEG_D
= FOP(7, FMT_D
),
9505 OPC_ROUND_L_D
= FOP(8, FMT_D
),
9506 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
9507 OPC_CEIL_L_D
= FOP(10, FMT_D
),
9508 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
9509 OPC_ROUND_W_D
= FOP(12, FMT_D
),
9510 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
9511 OPC_CEIL_W_D
= FOP(14, FMT_D
),
9512 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
9513 OPC_SEL_D
= FOP(16, FMT_D
),
9514 OPC_MOVCF_D
= FOP(17, FMT_D
),
9515 OPC_MOVZ_D
= FOP(18, FMT_D
),
9516 OPC_MOVN_D
= FOP(19, FMT_D
),
9517 OPC_SELEQZ_D
= FOP(20, FMT_D
),
9518 OPC_RECIP_D
= FOP(21, FMT_D
),
9519 OPC_RSQRT_D
= FOP(22, FMT_D
),
9520 OPC_SELNEZ_D
= FOP(23, FMT_D
),
9521 OPC_MADDF_D
= FOP(24, FMT_D
),
9522 OPC_MSUBF_D
= FOP(25, FMT_D
),
9523 OPC_RINT_D
= FOP(26, FMT_D
),
9524 OPC_CLASS_D
= FOP(27, FMT_D
),
9525 OPC_MIN_D
= FOP(28, FMT_D
),
9526 OPC_RECIP2_D
= FOP(28, FMT_D
),
9527 OPC_MINA_D
= FOP(29, FMT_D
),
9528 OPC_RECIP1_D
= FOP(29, FMT_D
),
9529 OPC_MAX_D
= FOP(30, FMT_D
),
9530 OPC_RSQRT1_D
= FOP(30, FMT_D
),
9531 OPC_MAXA_D
= FOP(31, FMT_D
),
9532 OPC_RSQRT2_D
= FOP(31, FMT_D
),
9533 OPC_CVT_S_D
= FOP(32, FMT_D
),
9534 OPC_CVT_W_D
= FOP(36, FMT_D
),
9535 OPC_CVT_L_D
= FOP(37, FMT_D
),
9536 OPC_CMP_F_D
= FOP(48, FMT_D
),
9537 OPC_CMP_UN_D
= FOP(49, FMT_D
),
9538 OPC_CMP_EQ_D
= FOP(50, FMT_D
),
9539 OPC_CMP_UEQ_D
= FOP(51, FMT_D
),
9540 OPC_CMP_OLT_D
= FOP(52, FMT_D
),
9541 OPC_CMP_ULT_D
= FOP(53, FMT_D
),
9542 OPC_CMP_OLE_D
= FOP(54, FMT_D
),
9543 OPC_CMP_ULE_D
= FOP(55, FMT_D
),
9544 OPC_CMP_SF_D
= FOP(56, FMT_D
),
9545 OPC_CMP_NGLE_D
= FOP(57, FMT_D
),
9546 OPC_CMP_SEQ_D
= FOP(58, FMT_D
),
9547 OPC_CMP_NGL_D
= FOP(59, FMT_D
),
9548 OPC_CMP_LT_D
= FOP(60, FMT_D
),
9549 OPC_CMP_NGE_D
= FOP(61, FMT_D
),
9550 OPC_CMP_LE_D
= FOP(62, FMT_D
),
9551 OPC_CMP_NGT_D
= FOP(63, FMT_D
),
9553 OPC_CVT_S_W
= FOP(32, FMT_W
),
9554 OPC_CVT_D_W
= FOP(33, FMT_W
),
9555 OPC_CVT_S_L
= FOP(32, FMT_L
),
9556 OPC_CVT_D_L
= FOP(33, FMT_L
),
9557 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
9559 OPC_ADD_PS
= FOP(0, FMT_PS
),
9560 OPC_SUB_PS
= FOP(1, FMT_PS
),
9561 OPC_MUL_PS
= FOP(2, FMT_PS
),
9562 OPC_DIV_PS
= FOP(3, FMT_PS
),
9563 OPC_ABS_PS
= FOP(5, FMT_PS
),
9564 OPC_MOV_PS
= FOP(6, FMT_PS
),
9565 OPC_NEG_PS
= FOP(7, FMT_PS
),
9566 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
9567 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
9568 OPC_MOVN_PS
= FOP(19, FMT_PS
),
9569 OPC_ADDR_PS
= FOP(24, FMT_PS
),
9570 OPC_MULR_PS
= FOP(26, FMT_PS
),
9571 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
9572 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
9573 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
9574 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
9576 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
9577 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
9578 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
9579 OPC_PLL_PS
= FOP(44, FMT_PS
),
9580 OPC_PLU_PS
= FOP(45, FMT_PS
),
9581 OPC_PUL_PS
= FOP(46, FMT_PS
),
9582 OPC_PUU_PS
= FOP(47, FMT_PS
),
9583 OPC_CMP_F_PS
= FOP(48, FMT_PS
),
9584 OPC_CMP_UN_PS
= FOP(49, FMT_PS
),
9585 OPC_CMP_EQ_PS
= FOP(50, FMT_PS
),
9586 OPC_CMP_UEQ_PS
= FOP(51, FMT_PS
),
9587 OPC_CMP_OLT_PS
= FOP(52, FMT_PS
),
9588 OPC_CMP_ULT_PS
= FOP(53, FMT_PS
),
9589 OPC_CMP_OLE_PS
= FOP(54, FMT_PS
),
9590 OPC_CMP_ULE_PS
= FOP(55, FMT_PS
),
9591 OPC_CMP_SF_PS
= FOP(56, FMT_PS
),
9592 OPC_CMP_NGLE_PS
= FOP(57, FMT_PS
),
9593 OPC_CMP_SEQ_PS
= FOP(58, FMT_PS
),
9594 OPC_CMP_NGL_PS
= FOP(59, FMT_PS
),
9595 OPC_CMP_LT_PS
= FOP(60, FMT_PS
),
9596 OPC_CMP_NGE_PS
= FOP(61, FMT_PS
),
9597 OPC_CMP_LE_PS
= FOP(62, FMT_PS
),
9598 OPC_CMP_NGT_PS
= FOP(63, FMT_PS
),
9602 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
9603 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
9604 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
9605 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
9606 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
9607 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
9608 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
9609 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
9610 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
9611 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
9612 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
9613 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
9614 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
9615 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
9616 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
9617 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
9618 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
9619 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
9620 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
9621 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
9622 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
9623 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
9625 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
9626 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
9627 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
9628 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
9629 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
9630 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
9631 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
9632 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
9633 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
9634 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
9635 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
9636 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
9637 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
9638 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
9639 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
9640 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
9641 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
9642 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
9643 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
9644 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
9645 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
9646 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
9649 static void gen_cp1(DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
9651 TCGv t0
= tcg_temp_new();
9656 TCGv_i32 fp0
= tcg_temp_new_i32();
9658 gen_load_fpr32(ctx
, fp0
, fs
);
9659 tcg_gen_ext_i32_tl(t0
, fp0
);
9660 tcg_temp_free_i32(fp0
);
9662 gen_store_gpr(t0
, rt
);
9665 gen_load_gpr(t0
, rt
);
9667 TCGv_i32 fp0
= tcg_temp_new_i32();
9669 tcg_gen_trunc_tl_i32(fp0
, t0
);
9670 gen_store_fpr32(ctx
, fp0
, fs
);
9671 tcg_temp_free_i32(fp0
);
9675 gen_helper_1e0i(cfc1
, t0
, fs
);
9676 gen_store_gpr(t0
, rt
);
9679 gen_load_gpr(t0
, rt
);
9680 save_cpu_state(ctx
, 0);
9681 gen_helper_0e2i(ctc1
, t0
, tcg_constant_i32(fs
), rt
);
9682 /* Stop translation as we may have changed hflags */
9683 ctx
->base
.is_jmp
= DISAS_STOP
;
9685 #if defined(TARGET_MIPS64)
9687 gen_load_fpr64(ctx
, t0
, fs
);
9688 gen_store_gpr(t0
, rt
);
9691 gen_load_gpr(t0
, rt
);
9692 gen_store_fpr64(ctx
, t0
, fs
);
9697 TCGv_i32 fp0
= tcg_temp_new_i32();
9699 gen_load_fpr32h(ctx
, fp0
, fs
);
9700 tcg_gen_ext_i32_tl(t0
, fp0
);
9701 tcg_temp_free_i32(fp0
);
9703 gen_store_gpr(t0
, rt
);
9706 gen_load_gpr(t0
, rt
);
9708 TCGv_i32 fp0
= tcg_temp_new_i32();
9710 tcg_gen_trunc_tl_i32(fp0
, t0
);
9711 gen_store_fpr32h(ctx
, fp0
, fs
);
9712 tcg_temp_free_i32(fp0
);
9716 MIPS_INVAL("cp1 move");
9717 gen_reserved_instruction(ctx
);
9725 static void gen_movci(DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
9742 l1
= gen_new_label();
9743 t0
= tcg_temp_new_i32();
9744 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9745 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9746 tcg_temp_free_i32(t0
);
9747 gen_load_gpr(cpu_gpr
[rd
], rs
);
9751 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
9755 TCGv_i32 t0
= tcg_temp_new_i32();
9756 TCGLabel
*l1
= gen_new_label();
9764 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9765 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9766 gen_load_fpr32(ctx
, t0
, fs
);
9767 gen_store_fpr32(ctx
, t0
, fd
);
9769 tcg_temp_free_i32(t0
);
9772 static inline void gen_movcf_d(DisasContext
*ctx
, int fs
, int fd
, int cc
,
9776 TCGv_i32 t0
= tcg_temp_new_i32();
9778 TCGLabel
*l1
= gen_new_label();
9786 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9787 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9788 tcg_temp_free_i32(t0
);
9789 fp0
= tcg_temp_new_i64();
9790 gen_load_fpr64(ctx
, fp0
, fs
);
9791 gen_store_fpr64(ctx
, fp0
, fd
);
9792 tcg_temp_free_i64(fp0
);
9796 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
9800 TCGv_i32 t0
= tcg_temp_new_i32();
9801 TCGLabel
*l1
= gen_new_label();
9802 TCGLabel
*l2
= gen_new_label();
9810 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9811 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9812 gen_load_fpr32(ctx
, t0
, fs
);
9813 gen_store_fpr32(ctx
, t0
, fd
);
9816 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+ 1));
9817 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
9818 gen_load_fpr32h(ctx
, t0
, fs
);
9819 gen_store_fpr32h(ctx
, t0
, fd
);
9820 tcg_temp_free_i32(t0
);
9824 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
9827 TCGv_i32 t1
= tcg_const_i32(0);
9828 TCGv_i32 fp0
= tcg_temp_new_i32();
9829 TCGv_i32 fp1
= tcg_temp_new_i32();
9830 TCGv_i32 fp2
= tcg_temp_new_i32();
9831 gen_load_fpr32(ctx
, fp0
, fd
);
9832 gen_load_fpr32(ctx
, fp1
, ft
);
9833 gen_load_fpr32(ctx
, fp2
, fs
);
9837 tcg_gen_andi_i32(fp0
, fp0
, 1);
9838 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
9841 tcg_gen_andi_i32(fp1
, fp1
, 1);
9842 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
9845 tcg_gen_andi_i32(fp1
, fp1
, 1);
9846 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
9849 MIPS_INVAL("gen_sel_s");
9850 gen_reserved_instruction(ctx
);
9854 gen_store_fpr32(ctx
, fp0
, fd
);
9855 tcg_temp_free_i32(fp2
);
9856 tcg_temp_free_i32(fp1
);
9857 tcg_temp_free_i32(fp0
);
9858 tcg_temp_free_i32(t1
);
9861 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
9864 TCGv_i64 t1
= tcg_const_i64(0);
9865 TCGv_i64 fp0
= tcg_temp_new_i64();
9866 TCGv_i64 fp1
= tcg_temp_new_i64();
9867 TCGv_i64 fp2
= tcg_temp_new_i64();
9868 gen_load_fpr64(ctx
, fp0
, fd
);
9869 gen_load_fpr64(ctx
, fp1
, ft
);
9870 gen_load_fpr64(ctx
, fp2
, fs
);
9874 tcg_gen_andi_i64(fp0
, fp0
, 1);
9875 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
9878 tcg_gen_andi_i64(fp1
, fp1
, 1);
9879 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
9882 tcg_gen_andi_i64(fp1
, fp1
, 1);
9883 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
9886 MIPS_INVAL("gen_sel_d");
9887 gen_reserved_instruction(ctx
);
9891 gen_store_fpr64(ctx
, fp0
, fd
);
9892 tcg_temp_free_i64(fp2
);
9893 tcg_temp_free_i64(fp1
);
9894 tcg_temp_free_i64(fp0
);
9895 tcg_temp_free_i64(t1
);
9898 static void gen_farith(DisasContext
*ctx
, enum fopcode op1
,
9899 int ft
, int fs
, int fd
, int cc
)
9901 uint32_t func
= ctx
->opcode
& 0x3f;
9905 TCGv_i32 fp0
= tcg_temp_new_i32();
9906 TCGv_i32 fp1
= tcg_temp_new_i32();
9908 gen_load_fpr32(ctx
, fp0
, fs
);
9909 gen_load_fpr32(ctx
, fp1
, ft
);
9910 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
9911 tcg_temp_free_i32(fp1
);
9912 gen_store_fpr32(ctx
, fp0
, fd
);
9913 tcg_temp_free_i32(fp0
);
9918 TCGv_i32 fp0
= tcg_temp_new_i32();
9919 TCGv_i32 fp1
= tcg_temp_new_i32();
9921 gen_load_fpr32(ctx
, fp0
, fs
);
9922 gen_load_fpr32(ctx
, fp1
, ft
);
9923 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
9924 tcg_temp_free_i32(fp1
);
9925 gen_store_fpr32(ctx
, fp0
, fd
);
9926 tcg_temp_free_i32(fp0
);
9931 TCGv_i32 fp0
= tcg_temp_new_i32();
9932 TCGv_i32 fp1
= tcg_temp_new_i32();
9934 gen_load_fpr32(ctx
, fp0
, fs
);
9935 gen_load_fpr32(ctx
, fp1
, ft
);
9936 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9937 tcg_temp_free_i32(fp1
);
9938 gen_store_fpr32(ctx
, fp0
, fd
);
9939 tcg_temp_free_i32(fp0
);
9944 TCGv_i32 fp0
= tcg_temp_new_i32();
9945 TCGv_i32 fp1
= tcg_temp_new_i32();
9947 gen_load_fpr32(ctx
, fp0
, fs
);
9948 gen_load_fpr32(ctx
, fp1
, ft
);
9949 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9950 tcg_temp_free_i32(fp1
);
9951 gen_store_fpr32(ctx
, fp0
, fd
);
9952 tcg_temp_free_i32(fp0
);
9957 TCGv_i32 fp0
= tcg_temp_new_i32();
9959 gen_load_fpr32(ctx
, fp0
, fs
);
9960 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9961 gen_store_fpr32(ctx
, fp0
, fd
);
9962 tcg_temp_free_i32(fp0
);
9967 TCGv_i32 fp0
= tcg_temp_new_i32();
9969 gen_load_fpr32(ctx
, fp0
, fs
);
9971 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9973 gen_helper_float_abs_s(fp0
, fp0
);
9975 gen_store_fpr32(ctx
, fp0
, fd
);
9976 tcg_temp_free_i32(fp0
);
9981 TCGv_i32 fp0
= tcg_temp_new_i32();
9983 gen_load_fpr32(ctx
, fp0
, fs
);
9984 gen_store_fpr32(ctx
, fp0
, fd
);
9985 tcg_temp_free_i32(fp0
);
9990 TCGv_i32 fp0
= tcg_temp_new_i32();
9992 gen_load_fpr32(ctx
, fp0
, fs
);
9994 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9996 gen_helper_float_chs_s(fp0
, fp0
);
9998 gen_store_fpr32(ctx
, fp0
, fd
);
9999 tcg_temp_free_i32(fp0
);
10002 case OPC_ROUND_L_S
:
10003 check_cp1_64bitmode(ctx
);
10005 TCGv_i32 fp32
= tcg_temp_new_i32();
10006 TCGv_i64 fp64
= tcg_temp_new_i64();
10008 gen_load_fpr32(ctx
, fp32
, fs
);
10009 if (ctx
->nan2008
) {
10010 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10012 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10014 tcg_temp_free_i32(fp32
);
10015 gen_store_fpr64(ctx
, fp64
, fd
);
10016 tcg_temp_free_i64(fp64
);
10019 case OPC_TRUNC_L_S
:
10020 check_cp1_64bitmode(ctx
);
10022 TCGv_i32 fp32
= tcg_temp_new_i32();
10023 TCGv_i64 fp64
= tcg_temp_new_i64();
10025 gen_load_fpr32(ctx
, fp32
, fs
);
10026 if (ctx
->nan2008
) {
10027 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10029 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10031 tcg_temp_free_i32(fp32
);
10032 gen_store_fpr64(ctx
, fp64
, fd
);
10033 tcg_temp_free_i64(fp64
);
10037 check_cp1_64bitmode(ctx
);
10039 TCGv_i32 fp32
= tcg_temp_new_i32();
10040 TCGv_i64 fp64
= tcg_temp_new_i64();
10042 gen_load_fpr32(ctx
, fp32
, fs
);
10043 if (ctx
->nan2008
) {
10044 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10046 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10048 tcg_temp_free_i32(fp32
);
10049 gen_store_fpr64(ctx
, fp64
, fd
);
10050 tcg_temp_free_i64(fp64
);
10053 case OPC_FLOOR_L_S
:
10054 check_cp1_64bitmode(ctx
);
10056 TCGv_i32 fp32
= tcg_temp_new_i32();
10057 TCGv_i64 fp64
= tcg_temp_new_i64();
10059 gen_load_fpr32(ctx
, fp32
, fs
);
10060 if (ctx
->nan2008
) {
10061 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10063 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10065 tcg_temp_free_i32(fp32
);
10066 gen_store_fpr64(ctx
, fp64
, fd
);
10067 tcg_temp_free_i64(fp64
);
10070 case OPC_ROUND_W_S
:
10072 TCGv_i32 fp0
= tcg_temp_new_i32();
10074 gen_load_fpr32(ctx
, fp0
, fs
);
10075 if (ctx
->nan2008
) {
10076 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10078 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10080 gen_store_fpr32(ctx
, fp0
, fd
);
10081 tcg_temp_free_i32(fp0
);
10084 case OPC_TRUNC_W_S
:
10086 TCGv_i32 fp0
= tcg_temp_new_i32();
10088 gen_load_fpr32(ctx
, fp0
, fs
);
10089 if (ctx
->nan2008
) {
10090 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10092 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10094 gen_store_fpr32(ctx
, fp0
, fd
);
10095 tcg_temp_free_i32(fp0
);
10100 TCGv_i32 fp0
= tcg_temp_new_i32();
10102 gen_load_fpr32(ctx
, fp0
, fs
);
10103 if (ctx
->nan2008
) {
10104 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10106 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10108 gen_store_fpr32(ctx
, fp0
, fd
);
10109 tcg_temp_free_i32(fp0
);
10112 case OPC_FLOOR_W_S
:
10114 TCGv_i32 fp0
= tcg_temp_new_i32();
10116 gen_load_fpr32(ctx
, fp0
, fs
);
10117 if (ctx
->nan2008
) {
10118 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10120 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10122 gen_store_fpr32(ctx
, fp0
, fd
);
10123 tcg_temp_free_i32(fp0
);
10127 check_insn(ctx
, ISA_MIPS_R6
);
10128 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10131 check_insn(ctx
, ISA_MIPS_R6
);
10132 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10135 check_insn(ctx
, ISA_MIPS_R6
);
10136 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10139 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10140 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10143 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10145 TCGLabel
*l1
= gen_new_label();
10149 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10151 fp0
= tcg_temp_new_i32();
10152 gen_load_fpr32(ctx
, fp0
, fs
);
10153 gen_store_fpr32(ctx
, fp0
, fd
);
10154 tcg_temp_free_i32(fp0
);
10159 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10161 TCGLabel
*l1
= gen_new_label();
10165 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10166 fp0
= tcg_temp_new_i32();
10167 gen_load_fpr32(ctx
, fp0
, fs
);
10168 gen_store_fpr32(ctx
, fp0
, fd
);
10169 tcg_temp_free_i32(fp0
);
10176 TCGv_i32 fp0
= tcg_temp_new_i32();
10178 gen_load_fpr32(ctx
, fp0
, fs
);
10179 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10180 gen_store_fpr32(ctx
, fp0
, fd
);
10181 tcg_temp_free_i32(fp0
);
10186 TCGv_i32 fp0
= tcg_temp_new_i32();
10188 gen_load_fpr32(ctx
, fp0
, fs
);
10189 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10190 gen_store_fpr32(ctx
, fp0
, fd
);
10191 tcg_temp_free_i32(fp0
);
10195 check_insn(ctx
, ISA_MIPS_R6
);
10197 TCGv_i32 fp0
= tcg_temp_new_i32();
10198 TCGv_i32 fp1
= tcg_temp_new_i32();
10199 TCGv_i32 fp2
= tcg_temp_new_i32();
10200 gen_load_fpr32(ctx
, fp0
, fs
);
10201 gen_load_fpr32(ctx
, fp1
, ft
);
10202 gen_load_fpr32(ctx
, fp2
, fd
);
10203 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10204 gen_store_fpr32(ctx
, fp2
, fd
);
10205 tcg_temp_free_i32(fp2
);
10206 tcg_temp_free_i32(fp1
);
10207 tcg_temp_free_i32(fp0
);
10211 check_insn(ctx
, ISA_MIPS_R6
);
10213 TCGv_i32 fp0
= tcg_temp_new_i32();
10214 TCGv_i32 fp1
= tcg_temp_new_i32();
10215 TCGv_i32 fp2
= tcg_temp_new_i32();
10216 gen_load_fpr32(ctx
, fp0
, fs
);
10217 gen_load_fpr32(ctx
, fp1
, ft
);
10218 gen_load_fpr32(ctx
, fp2
, fd
);
10219 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10220 gen_store_fpr32(ctx
, fp2
, fd
);
10221 tcg_temp_free_i32(fp2
);
10222 tcg_temp_free_i32(fp1
);
10223 tcg_temp_free_i32(fp0
);
10227 check_insn(ctx
, ISA_MIPS_R6
);
10229 TCGv_i32 fp0
= tcg_temp_new_i32();
10230 gen_load_fpr32(ctx
, fp0
, fs
);
10231 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10232 gen_store_fpr32(ctx
, fp0
, fd
);
10233 tcg_temp_free_i32(fp0
);
10237 check_insn(ctx
, ISA_MIPS_R6
);
10239 TCGv_i32 fp0
= tcg_temp_new_i32();
10240 gen_load_fpr32(ctx
, fp0
, fs
);
10241 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10242 gen_store_fpr32(ctx
, fp0
, fd
);
10243 tcg_temp_free_i32(fp0
);
10246 case OPC_MIN_S
: /* OPC_RECIP2_S */
10247 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10249 TCGv_i32 fp0
= tcg_temp_new_i32();
10250 TCGv_i32 fp1
= tcg_temp_new_i32();
10251 TCGv_i32 fp2
= tcg_temp_new_i32();
10252 gen_load_fpr32(ctx
, fp0
, fs
);
10253 gen_load_fpr32(ctx
, fp1
, ft
);
10254 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10255 gen_store_fpr32(ctx
, fp2
, fd
);
10256 tcg_temp_free_i32(fp2
);
10257 tcg_temp_free_i32(fp1
);
10258 tcg_temp_free_i32(fp0
);
10261 check_cp1_64bitmode(ctx
);
10263 TCGv_i32 fp0
= tcg_temp_new_i32();
10264 TCGv_i32 fp1
= tcg_temp_new_i32();
10266 gen_load_fpr32(ctx
, fp0
, fs
);
10267 gen_load_fpr32(ctx
, fp1
, ft
);
10268 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10269 tcg_temp_free_i32(fp1
);
10270 gen_store_fpr32(ctx
, fp0
, fd
);
10271 tcg_temp_free_i32(fp0
);
10275 case OPC_MINA_S
: /* OPC_RECIP1_S */
10276 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10278 TCGv_i32 fp0
= tcg_temp_new_i32();
10279 TCGv_i32 fp1
= tcg_temp_new_i32();
10280 TCGv_i32 fp2
= tcg_temp_new_i32();
10281 gen_load_fpr32(ctx
, fp0
, fs
);
10282 gen_load_fpr32(ctx
, fp1
, ft
);
10283 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10284 gen_store_fpr32(ctx
, fp2
, fd
);
10285 tcg_temp_free_i32(fp2
);
10286 tcg_temp_free_i32(fp1
);
10287 tcg_temp_free_i32(fp0
);
10290 check_cp1_64bitmode(ctx
);
10292 TCGv_i32 fp0
= tcg_temp_new_i32();
10294 gen_load_fpr32(ctx
, fp0
, fs
);
10295 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
10296 gen_store_fpr32(ctx
, fp0
, fd
);
10297 tcg_temp_free_i32(fp0
);
10301 case OPC_MAX_S
: /* OPC_RSQRT1_S */
10302 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10304 TCGv_i32 fp0
= tcg_temp_new_i32();
10305 TCGv_i32 fp1
= tcg_temp_new_i32();
10306 gen_load_fpr32(ctx
, fp0
, fs
);
10307 gen_load_fpr32(ctx
, fp1
, ft
);
10308 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
10309 gen_store_fpr32(ctx
, fp1
, fd
);
10310 tcg_temp_free_i32(fp1
);
10311 tcg_temp_free_i32(fp0
);
10314 check_cp1_64bitmode(ctx
);
10316 TCGv_i32 fp0
= tcg_temp_new_i32();
10318 gen_load_fpr32(ctx
, fp0
, fs
);
10319 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
10320 gen_store_fpr32(ctx
, fp0
, fd
);
10321 tcg_temp_free_i32(fp0
);
10325 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
10326 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10328 TCGv_i32 fp0
= tcg_temp_new_i32();
10329 TCGv_i32 fp1
= tcg_temp_new_i32();
10330 gen_load_fpr32(ctx
, fp0
, fs
);
10331 gen_load_fpr32(ctx
, fp1
, ft
);
10332 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
10333 gen_store_fpr32(ctx
, fp1
, fd
);
10334 tcg_temp_free_i32(fp1
);
10335 tcg_temp_free_i32(fp0
);
10338 check_cp1_64bitmode(ctx
);
10340 TCGv_i32 fp0
= tcg_temp_new_i32();
10341 TCGv_i32 fp1
= tcg_temp_new_i32();
10343 gen_load_fpr32(ctx
, fp0
, fs
);
10344 gen_load_fpr32(ctx
, fp1
, ft
);
10345 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
10346 tcg_temp_free_i32(fp1
);
10347 gen_store_fpr32(ctx
, fp0
, fd
);
10348 tcg_temp_free_i32(fp0
);
10353 check_cp1_registers(ctx
, fd
);
10355 TCGv_i32 fp32
= tcg_temp_new_i32();
10356 TCGv_i64 fp64
= tcg_temp_new_i64();
10358 gen_load_fpr32(ctx
, fp32
, fs
);
10359 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
10360 tcg_temp_free_i32(fp32
);
10361 gen_store_fpr64(ctx
, fp64
, fd
);
10362 tcg_temp_free_i64(fp64
);
10367 TCGv_i32 fp0
= tcg_temp_new_i32();
10369 gen_load_fpr32(ctx
, fp0
, fs
);
10370 if (ctx
->nan2008
) {
10371 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
10373 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
10375 gen_store_fpr32(ctx
, fp0
, fd
);
10376 tcg_temp_free_i32(fp0
);
10380 check_cp1_64bitmode(ctx
);
10382 TCGv_i32 fp32
= tcg_temp_new_i32();
10383 TCGv_i64 fp64
= tcg_temp_new_i64();
10385 gen_load_fpr32(ctx
, fp32
, fs
);
10386 if (ctx
->nan2008
) {
10387 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
10389 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
10391 tcg_temp_free_i32(fp32
);
10392 gen_store_fpr64(ctx
, fp64
, fd
);
10393 tcg_temp_free_i64(fp64
);
10399 TCGv_i64 fp64
= tcg_temp_new_i64();
10400 TCGv_i32 fp32_0
= tcg_temp_new_i32();
10401 TCGv_i32 fp32_1
= tcg_temp_new_i32();
10403 gen_load_fpr32(ctx
, fp32_0
, fs
);
10404 gen_load_fpr32(ctx
, fp32_1
, ft
);
10405 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
10406 tcg_temp_free_i32(fp32_1
);
10407 tcg_temp_free_i32(fp32_0
);
10408 gen_store_fpr64(ctx
, fp64
, fd
);
10409 tcg_temp_free_i64(fp64
);
10415 case OPC_CMP_UEQ_S
:
10416 case OPC_CMP_OLT_S
:
10417 case OPC_CMP_ULT_S
:
10418 case OPC_CMP_OLE_S
:
10419 case OPC_CMP_ULE_S
:
10421 case OPC_CMP_NGLE_S
:
10422 case OPC_CMP_SEQ_S
:
10423 case OPC_CMP_NGL_S
:
10425 case OPC_CMP_NGE_S
:
10427 case OPC_CMP_NGT_S
:
10428 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10429 if (ctx
->opcode
& (1 << 6)) {
10430 gen_cmpabs_s(ctx
, func
- 48, ft
, fs
, cc
);
10432 gen_cmp_s(ctx
, func
- 48, ft
, fs
, cc
);
10436 check_cp1_registers(ctx
, fs
| ft
| fd
);
10438 TCGv_i64 fp0
= tcg_temp_new_i64();
10439 TCGv_i64 fp1
= tcg_temp_new_i64();
10441 gen_load_fpr64(ctx
, fp0
, fs
);
10442 gen_load_fpr64(ctx
, fp1
, ft
);
10443 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
10444 tcg_temp_free_i64(fp1
);
10445 gen_store_fpr64(ctx
, fp0
, fd
);
10446 tcg_temp_free_i64(fp0
);
10450 check_cp1_registers(ctx
, fs
| ft
| fd
);
10452 TCGv_i64 fp0
= tcg_temp_new_i64();
10453 TCGv_i64 fp1
= tcg_temp_new_i64();
10455 gen_load_fpr64(ctx
, fp0
, fs
);
10456 gen_load_fpr64(ctx
, fp1
, ft
);
10457 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
10458 tcg_temp_free_i64(fp1
);
10459 gen_store_fpr64(ctx
, fp0
, fd
);
10460 tcg_temp_free_i64(fp0
);
10464 check_cp1_registers(ctx
, fs
| ft
| fd
);
10466 TCGv_i64 fp0
= tcg_temp_new_i64();
10467 TCGv_i64 fp1
= tcg_temp_new_i64();
10469 gen_load_fpr64(ctx
, fp0
, fs
);
10470 gen_load_fpr64(ctx
, fp1
, ft
);
10471 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
10472 tcg_temp_free_i64(fp1
);
10473 gen_store_fpr64(ctx
, fp0
, fd
);
10474 tcg_temp_free_i64(fp0
);
10478 check_cp1_registers(ctx
, fs
| ft
| fd
);
10480 TCGv_i64 fp0
= tcg_temp_new_i64();
10481 TCGv_i64 fp1
= tcg_temp_new_i64();
10483 gen_load_fpr64(ctx
, fp0
, fs
);
10484 gen_load_fpr64(ctx
, fp1
, ft
);
10485 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
10486 tcg_temp_free_i64(fp1
);
10487 gen_store_fpr64(ctx
, fp0
, fd
);
10488 tcg_temp_free_i64(fp0
);
10492 check_cp1_registers(ctx
, fs
| fd
);
10494 TCGv_i64 fp0
= tcg_temp_new_i64();
10496 gen_load_fpr64(ctx
, fp0
, fs
);
10497 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
10498 gen_store_fpr64(ctx
, fp0
, fd
);
10499 tcg_temp_free_i64(fp0
);
10503 check_cp1_registers(ctx
, fs
| fd
);
10505 TCGv_i64 fp0
= tcg_temp_new_i64();
10507 gen_load_fpr64(ctx
, fp0
, fs
);
10508 if (ctx
->abs2008
) {
10509 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
10511 gen_helper_float_abs_d(fp0
, fp0
);
10513 gen_store_fpr64(ctx
, fp0
, fd
);
10514 tcg_temp_free_i64(fp0
);
10518 check_cp1_registers(ctx
, fs
| fd
);
10520 TCGv_i64 fp0
= tcg_temp_new_i64();
10522 gen_load_fpr64(ctx
, fp0
, fs
);
10523 gen_store_fpr64(ctx
, fp0
, fd
);
10524 tcg_temp_free_i64(fp0
);
10528 check_cp1_registers(ctx
, fs
| fd
);
10530 TCGv_i64 fp0
= tcg_temp_new_i64();
10532 gen_load_fpr64(ctx
, fp0
, fs
);
10533 if (ctx
->abs2008
) {
10534 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
10536 gen_helper_float_chs_d(fp0
, fp0
);
10538 gen_store_fpr64(ctx
, fp0
, fd
);
10539 tcg_temp_free_i64(fp0
);
10542 case OPC_ROUND_L_D
:
10543 check_cp1_64bitmode(ctx
);
10545 TCGv_i64 fp0
= tcg_temp_new_i64();
10547 gen_load_fpr64(ctx
, fp0
, fs
);
10548 if (ctx
->nan2008
) {
10549 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
10551 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
10553 gen_store_fpr64(ctx
, fp0
, fd
);
10554 tcg_temp_free_i64(fp0
);
10557 case OPC_TRUNC_L_D
:
10558 check_cp1_64bitmode(ctx
);
10560 TCGv_i64 fp0
= tcg_temp_new_i64();
10562 gen_load_fpr64(ctx
, fp0
, fs
);
10563 if (ctx
->nan2008
) {
10564 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
10566 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
10568 gen_store_fpr64(ctx
, fp0
, fd
);
10569 tcg_temp_free_i64(fp0
);
10573 check_cp1_64bitmode(ctx
);
10575 TCGv_i64 fp0
= tcg_temp_new_i64();
10577 gen_load_fpr64(ctx
, fp0
, fs
);
10578 if (ctx
->nan2008
) {
10579 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
10581 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
10583 gen_store_fpr64(ctx
, fp0
, fd
);
10584 tcg_temp_free_i64(fp0
);
10587 case OPC_FLOOR_L_D
:
10588 check_cp1_64bitmode(ctx
);
10590 TCGv_i64 fp0
= tcg_temp_new_i64();
10592 gen_load_fpr64(ctx
, fp0
, fs
);
10593 if (ctx
->nan2008
) {
10594 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
10596 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
10598 gen_store_fpr64(ctx
, fp0
, fd
);
10599 tcg_temp_free_i64(fp0
);
10602 case OPC_ROUND_W_D
:
10603 check_cp1_registers(ctx
, fs
);
10605 TCGv_i32 fp32
= tcg_temp_new_i32();
10606 TCGv_i64 fp64
= tcg_temp_new_i64();
10608 gen_load_fpr64(ctx
, fp64
, fs
);
10609 if (ctx
->nan2008
) {
10610 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
10612 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
10614 tcg_temp_free_i64(fp64
);
10615 gen_store_fpr32(ctx
, fp32
, fd
);
10616 tcg_temp_free_i32(fp32
);
10619 case OPC_TRUNC_W_D
:
10620 check_cp1_registers(ctx
, fs
);
10622 TCGv_i32 fp32
= tcg_temp_new_i32();
10623 TCGv_i64 fp64
= tcg_temp_new_i64();
10625 gen_load_fpr64(ctx
, fp64
, fs
);
10626 if (ctx
->nan2008
) {
10627 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
10629 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
10631 tcg_temp_free_i64(fp64
);
10632 gen_store_fpr32(ctx
, fp32
, fd
);
10633 tcg_temp_free_i32(fp32
);
10637 check_cp1_registers(ctx
, fs
);
10639 TCGv_i32 fp32
= tcg_temp_new_i32();
10640 TCGv_i64 fp64
= tcg_temp_new_i64();
10642 gen_load_fpr64(ctx
, fp64
, fs
);
10643 if (ctx
->nan2008
) {
10644 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
10646 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
10648 tcg_temp_free_i64(fp64
);
10649 gen_store_fpr32(ctx
, fp32
, fd
);
10650 tcg_temp_free_i32(fp32
);
10653 case OPC_FLOOR_W_D
:
10654 check_cp1_registers(ctx
, fs
);
10656 TCGv_i32 fp32
= tcg_temp_new_i32();
10657 TCGv_i64 fp64
= tcg_temp_new_i64();
10659 gen_load_fpr64(ctx
, fp64
, fs
);
10660 if (ctx
->nan2008
) {
10661 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
10663 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
10665 tcg_temp_free_i64(fp64
);
10666 gen_store_fpr32(ctx
, fp32
, fd
);
10667 tcg_temp_free_i32(fp32
);
10671 check_insn(ctx
, ISA_MIPS_R6
);
10672 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10675 check_insn(ctx
, ISA_MIPS_R6
);
10676 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10679 check_insn(ctx
, ISA_MIPS_R6
);
10680 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10683 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10684 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10687 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10689 TCGLabel
*l1
= gen_new_label();
10693 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10695 fp0
= tcg_temp_new_i64();
10696 gen_load_fpr64(ctx
, fp0
, fs
);
10697 gen_store_fpr64(ctx
, fp0
, fd
);
10698 tcg_temp_free_i64(fp0
);
10703 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10705 TCGLabel
*l1
= gen_new_label();
10709 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10710 fp0
= tcg_temp_new_i64();
10711 gen_load_fpr64(ctx
, fp0
, fs
);
10712 gen_store_fpr64(ctx
, fp0
, fd
);
10713 tcg_temp_free_i64(fp0
);
10719 check_cp1_registers(ctx
, fs
| fd
);
10721 TCGv_i64 fp0
= tcg_temp_new_i64();
10723 gen_load_fpr64(ctx
, fp0
, fs
);
10724 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
10725 gen_store_fpr64(ctx
, fp0
, fd
);
10726 tcg_temp_free_i64(fp0
);
10730 check_cp1_registers(ctx
, fs
| fd
);
10732 TCGv_i64 fp0
= tcg_temp_new_i64();
10734 gen_load_fpr64(ctx
, fp0
, fs
);
10735 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
10736 gen_store_fpr64(ctx
, fp0
, fd
);
10737 tcg_temp_free_i64(fp0
);
10741 check_insn(ctx
, ISA_MIPS_R6
);
10743 TCGv_i64 fp0
= tcg_temp_new_i64();
10744 TCGv_i64 fp1
= tcg_temp_new_i64();
10745 TCGv_i64 fp2
= tcg_temp_new_i64();
10746 gen_load_fpr64(ctx
, fp0
, fs
);
10747 gen_load_fpr64(ctx
, fp1
, ft
);
10748 gen_load_fpr64(ctx
, fp2
, fd
);
10749 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10750 gen_store_fpr64(ctx
, fp2
, fd
);
10751 tcg_temp_free_i64(fp2
);
10752 tcg_temp_free_i64(fp1
);
10753 tcg_temp_free_i64(fp0
);
10757 check_insn(ctx
, ISA_MIPS_R6
);
10759 TCGv_i64 fp0
= tcg_temp_new_i64();
10760 TCGv_i64 fp1
= tcg_temp_new_i64();
10761 TCGv_i64 fp2
= tcg_temp_new_i64();
10762 gen_load_fpr64(ctx
, fp0
, fs
);
10763 gen_load_fpr64(ctx
, fp1
, ft
);
10764 gen_load_fpr64(ctx
, fp2
, fd
);
10765 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10766 gen_store_fpr64(ctx
, fp2
, fd
);
10767 tcg_temp_free_i64(fp2
);
10768 tcg_temp_free_i64(fp1
);
10769 tcg_temp_free_i64(fp0
);
10773 check_insn(ctx
, ISA_MIPS_R6
);
10775 TCGv_i64 fp0
= tcg_temp_new_i64();
10776 gen_load_fpr64(ctx
, fp0
, fs
);
10777 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
10778 gen_store_fpr64(ctx
, fp0
, fd
);
10779 tcg_temp_free_i64(fp0
);
10783 check_insn(ctx
, ISA_MIPS_R6
);
10785 TCGv_i64 fp0
= tcg_temp_new_i64();
10786 gen_load_fpr64(ctx
, fp0
, fs
);
10787 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
10788 gen_store_fpr64(ctx
, fp0
, fd
);
10789 tcg_temp_free_i64(fp0
);
10792 case OPC_MIN_D
: /* OPC_RECIP2_D */
10793 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10795 TCGv_i64 fp0
= tcg_temp_new_i64();
10796 TCGv_i64 fp1
= tcg_temp_new_i64();
10797 gen_load_fpr64(ctx
, fp0
, fs
);
10798 gen_load_fpr64(ctx
, fp1
, ft
);
10799 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
10800 gen_store_fpr64(ctx
, fp1
, fd
);
10801 tcg_temp_free_i64(fp1
);
10802 tcg_temp_free_i64(fp0
);
10805 check_cp1_64bitmode(ctx
);
10807 TCGv_i64 fp0
= tcg_temp_new_i64();
10808 TCGv_i64 fp1
= tcg_temp_new_i64();
10810 gen_load_fpr64(ctx
, fp0
, fs
);
10811 gen_load_fpr64(ctx
, fp1
, ft
);
10812 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
10813 tcg_temp_free_i64(fp1
);
10814 gen_store_fpr64(ctx
, fp0
, fd
);
10815 tcg_temp_free_i64(fp0
);
10819 case OPC_MINA_D
: /* OPC_RECIP1_D */
10820 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10822 TCGv_i64 fp0
= tcg_temp_new_i64();
10823 TCGv_i64 fp1
= tcg_temp_new_i64();
10824 gen_load_fpr64(ctx
, fp0
, fs
);
10825 gen_load_fpr64(ctx
, fp1
, ft
);
10826 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
10827 gen_store_fpr64(ctx
, fp1
, fd
);
10828 tcg_temp_free_i64(fp1
);
10829 tcg_temp_free_i64(fp0
);
10832 check_cp1_64bitmode(ctx
);
10834 TCGv_i64 fp0
= tcg_temp_new_i64();
10836 gen_load_fpr64(ctx
, fp0
, fs
);
10837 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
10838 gen_store_fpr64(ctx
, fp0
, fd
);
10839 tcg_temp_free_i64(fp0
);
10843 case OPC_MAX_D
: /* OPC_RSQRT1_D */
10844 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10846 TCGv_i64 fp0
= tcg_temp_new_i64();
10847 TCGv_i64 fp1
= tcg_temp_new_i64();
10848 gen_load_fpr64(ctx
, fp0
, fs
);
10849 gen_load_fpr64(ctx
, fp1
, ft
);
10850 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
10851 gen_store_fpr64(ctx
, fp1
, fd
);
10852 tcg_temp_free_i64(fp1
);
10853 tcg_temp_free_i64(fp0
);
10856 check_cp1_64bitmode(ctx
);
10858 TCGv_i64 fp0
= tcg_temp_new_i64();
10860 gen_load_fpr64(ctx
, fp0
, fs
);
10861 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
10862 gen_store_fpr64(ctx
, fp0
, fd
);
10863 tcg_temp_free_i64(fp0
);
10867 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
10868 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10870 TCGv_i64 fp0
= tcg_temp_new_i64();
10871 TCGv_i64 fp1
= tcg_temp_new_i64();
10872 gen_load_fpr64(ctx
, fp0
, fs
);
10873 gen_load_fpr64(ctx
, fp1
, ft
);
10874 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
10875 gen_store_fpr64(ctx
, fp1
, fd
);
10876 tcg_temp_free_i64(fp1
);
10877 tcg_temp_free_i64(fp0
);
10880 check_cp1_64bitmode(ctx
);
10882 TCGv_i64 fp0
= tcg_temp_new_i64();
10883 TCGv_i64 fp1
= tcg_temp_new_i64();
10885 gen_load_fpr64(ctx
, fp0
, fs
);
10886 gen_load_fpr64(ctx
, fp1
, ft
);
10887 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
10888 tcg_temp_free_i64(fp1
);
10889 gen_store_fpr64(ctx
, fp0
, fd
);
10890 tcg_temp_free_i64(fp0
);
10897 case OPC_CMP_UEQ_D
:
10898 case OPC_CMP_OLT_D
:
10899 case OPC_CMP_ULT_D
:
10900 case OPC_CMP_OLE_D
:
10901 case OPC_CMP_ULE_D
:
10903 case OPC_CMP_NGLE_D
:
10904 case OPC_CMP_SEQ_D
:
10905 case OPC_CMP_NGL_D
:
10907 case OPC_CMP_NGE_D
:
10909 case OPC_CMP_NGT_D
:
10910 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10911 if (ctx
->opcode
& (1 << 6)) {
10912 gen_cmpabs_d(ctx
, func
- 48, ft
, fs
, cc
);
10914 gen_cmp_d(ctx
, func
- 48, ft
, fs
, cc
);
10918 check_cp1_registers(ctx
, fs
);
10920 TCGv_i32 fp32
= tcg_temp_new_i32();
10921 TCGv_i64 fp64
= tcg_temp_new_i64();
10923 gen_load_fpr64(ctx
, fp64
, fs
);
10924 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
10925 tcg_temp_free_i64(fp64
);
10926 gen_store_fpr32(ctx
, fp32
, fd
);
10927 tcg_temp_free_i32(fp32
);
10931 check_cp1_registers(ctx
, fs
);
10933 TCGv_i32 fp32
= tcg_temp_new_i32();
10934 TCGv_i64 fp64
= tcg_temp_new_i64();
10936 gen_load_fpr64(ctx
, fp64
, fs
);
10937 if (ctx
->nan2008
) {
10938 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10940 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10942 tcg_temp_free_i64(fp64
);
10943 gen_store_fpr32(ctx
, fp32
, fd
);
10944 tcg_temp_free_i32(fp32
);
10948 check_cp1_64bitmode(ctx
);
10950 TCGv_i64 fp0
= tcg_temp_new_i64();
10952 gen_load_fpr64(ctx
, fp0
, fs
);
10953 if (ctx
->nan2008
) {
10954 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10956 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10958 gen_store_fpr64(ctx
, fp0
, fd
);
10959 tcg_temp_free_i64(fp0
);
10964 TCGv_i32 fp0
= tcg_temp_new_i32();
10966 gen_load_fpr32(ctx
, fp0
, fs
);
10967 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10968 gen_store_fpr32(ctx
, fp0
, fd
);
10969 tcg_temp_free_i32(fp0
);
10973 check_cp1_registers(ctx
, fd
);
10975 TCGv_i32 fp32
= tcg_temp_new_i32();
10976 TCGv_i64 fp64
= tcg_temp_new_i64();
10978 gen_load_fpr32(ctx
, fp32
, fs
);
10979 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10980 tcg_temp_free_i32(fp32
);
10981 gen_store_fpr64(ctx
, fp64
, fd
);
10982 tcg_temp_free_i64(fp64
);
10986 check_cp1_64bitmode(ctx
);
10988 TCGv_i32 fp32
= tcg_temp_new_i32();
10989 TCGv_i64 fp64
= tcg_temp_new_i64();
10991 gen_load_fpr64(ctx
, fp64
, fs
);
10992 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10993 tcg_temp_free_i64(fp64
);
10994 gen_store_fpr32(ctx
, fp32
, fd
);
10995 tcg_temp_free_i32(fp32
);
10999 check_cp1_64bitmode(ctx
);
11001 TCGv_i64 fp0
= tcg_temp_new_i64();
11003 gen_load_fpr64(ctx
, fp0
, fs
);
11004 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11005 gen_store_fpr64(ctx
, fp0
, fd
);
11006 tcg_temp_free_i64(fp0
);
11009 case OPC_CVT_PS_PW
:
11012 TCGv_i64 fp0
= tcg_temp_new_i64();
11014 gen_load_fpr64(ctx
, fp0
, fs
);
11015 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11016 gen_store_fpr64(ctx
, fp0
, fd
);
11017 tcg_temp_free_i64(fp0
);
11023 TCGv_i64 fp0
= tcg_temp_new_i64();
11024 TCGv_i64 fp1
= tcg_temp_new_i64();
11026 gen_load_fpr64(ctx
, fp0
, fs
);
11027 gen_load_fpr64(ctx
, fp1
, ft
);
11028 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11029 tcg_temp_free_i64(fp1
);
11030 gen_store_fpr64(ctx
, fp0
, fd
);
11031 tcg_temp_free_i64(fp0
);
11037 TCGv_i64 fp0
= tcg_temp_new_i64();
11038 TCGv_i64 fp1
= tcg_temp_new_i64();
11040 gen_load_fpr64(ctx
, fp0
, fs
);
11041 gen_load_fpr64(ctx
, fp1
, ft
);
11042 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11043 tcg_temp_free_i64(fp1
);
11044 gen_store_fpr64(ctx
, fp0
, fd
);
11045 tcg_temp_free_i64(fp0
);
11051 TCGv_i64 fp0
= tcg_temp_new_i64();
11052 TCGv_i64 fp1
= tcg_temp_new_i64();
11054 gen_load_fpr64(ctx
, fp0
, fs
);
11055 gen_load_fpr64(ctx
, fp1
, ft
);
11056 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11057 tcg_temp_free_i64(fp1
);
11058 gen_store_fpr64(ctx
, fp0
, fd
);
11059 tcg_temp_free_i64(fp0
);
11065 TCGv_i64 fp0
= tcg_temp_new_i64();
11067 gen_load_fpr64(ctx
, fp0
, fs
);
11068 gen_helper_float_abs_ps(fp0
, fp0
);
11069 gen_store_fpr64(ctx
, fp0
, fd
);
11070 tcg_temp_free_i64(fp0
);
11076 TCGv_i64 fp0
= tcg_temp_new_i64();
11078 gen_load_fpr64(ctx
, fp0
, fs
);
11079 gen_store_fpr64(ctx
, fp0
, fd
);
11080 tcg_temp_free_i64(fp0
);
11086 TCGv_i64 fp0
= tcg_temp_new_i64();
11088 gen_load_fpr64(ctx
, fp0
, fs
);
11089 gen_helper_float_chs_ps(fp0
, fp0
);
11090 gen_store_fpr64(ctx
, fp0
, fd
);
11091 tcg_temp_free_i64(fp0
);
11096 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11101 TCGLabel
*l1
= gen_new_label();
11105 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11107 fp0
= tcg_temp_new_i64();
11108 gen_load_fpr64(ctx
, fp0
, fs
);
11109 gen_store_fpr64(ctx
, fp0
, fd
);
11110 tcg_temp_free_i64(fp0
);
11117 TCGLabel
*l1
= gen_new_label();
11121 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11122 fp0
= tcg_temp_new_i64();
11123 gen_load_fpr64(ctx
, fp0
, fs
);
11124 gen_store_fpr64(ctx
, fp0
, fd
);
11125 tcg_temp_free_i64(fp0
);
11133 TCGv_i64 fp0
= tcg_temp_new_i64();
11134 TCGv_i64 fp1
= tcg_temp_new_i64();
11136 gen_load_fpr64(ctx
, fp0
, ft
);
11137 gen_load_fpr64(ctx
, fp1
, fs
);
11138 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11139 tcg_temp_free_i64(fp1
);
11140 gen_store_fpr64(ctx
, fp0
, fd
);
11141 tcg_temp_free_i64(fp0
);
11147 TCGv_i64 fp0
= tcg_temp_new_i64();
11148 TCGv_i64 fp1
= tcg_temp_new_i64();
11150 gen_load_fpr64(ctx
, fp0
, ft
);
11151 gen_load_fpr64(ctx
, fp1
, fs
);
11152 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11153 tcg_temp_free_i64(fp1
);
11154 gen_store_fpr64(ctx
, fp0
, fd
);
11155 tcg_temp_free_i64(fp0
);
11158 case OPC_RECIP2_PS
:
11161 TCGv_i64 fp0
= tcg_temp_new_i64();
11162 TCGv_i64 fp1
= tcg_temp_new_i64();
11164 gen_load_fpr64(ctx
, fp0
, fs
);
11165 gen_load_fpr64(ctx
, fp1
, ft
);
11166 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11167 tcg_temp_free_i64(fp1
);
11168 gen_store_fpr64(ctx
, fp0
, fd
);
11169 tcg_temp_free_i64(fp0
);
11172 case OPC_RECIP1_PS
:
11175 TCGv_i64 fp0
= tcg_temp_new_i64();
11177 gen_load_fpr64(ctx
, fp0
, fs
);
11178 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11179 gen_store_fpr64(ctx
, fp0
, fd
);
11180 tcg_temp_free_i64(fp0
);
11183 case OPC_RSQRT1_PS
:
11186 TCGv_i64 fp0
= tcg_temp_new_i64();
11188 gen_load_fpr64(ctx
, fp0
, fs
);
11189 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11190 gen_store_fpr64(ctx
, fp0
, fd
);
11191 tcg_temp_free_i64(fp0
);
11194 case OPC_RSQRT2_PS
:
11197 TCGv_i64 fp0
= tcg_temp_new_i64();
11198 TCGv_i64 fp1
= tcg_temp_new_i64();
11200 gen_load_fpr64(ctx
, fp0
, fs
);
11201 gen_load_fpr64(ctx
, fp1
, ft
);
11202 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11203 tcg_temp_free_i64(fp1
);
11204 gen_store_fpr64(ctx
, fp0
, fd
);
11205 tcg_temp_free_i64(fp0
);
11209 check_cp1_64bitmode(ctx
);
11211 TCGv_i32 fp0
= tcg_temp_new_i32();
11213 gen_load_fpr32h(ctx
, fp0
, fs
);
11214 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11215 gen_store_fpr32(ctx
, fp0
, fd
);
11216 tcg_temp_free_i32(fp0
);
11219 case OPC_CVT_PW_PS
:
11222 TCGv_i64 fp0
= tcg_temp_new_i64();
11224 gen_load_fpr64(ctx
, fp0
, fs
);
11225 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11226 gen_store_fpr64(ctx
, fp0
, fd
);
11227 tcg_temp_free_i64(fp0
);
11231 check_cp1_64bitmode(ctx
);
11233 TCGv_i32 fp0
= tcg_temp_new_i32();
11235 gen_load_fpr32(ctx
, fp0
, fs
);
11236 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11237 gen_store_fpr32(ctx
, fp0
, fd
);
11238 tcg_temp_free_i32(fp0
);
11244 TCGv_i32 fp0
= tcg_temp_new_i32();
11245 TCGv_i32 fp1
= tcg_temp_new_i32();
11247 gen_load_fpr32(ctx
, fp0
, fs
);
11248 gen_load_fpr32(ctx
, fp1
, ft
);
11249 gen_store_fpr32h(ctx
, fp0
, fd
);
11250 gen_store_fpr32(ctx
, fp1
, fd
);
11251 tcg_temp_free_i32(fp0
);
11252 tcg_temp_free_i32(fp1
);
11258 TCGv_i32 fp0
= tcg_temp_new_i32();
11259 TCGv_i32 fp1
= tcg_temp_new_i32();
11261 gen_load_fpr32(ctx
, fp0
, fs
);
11262 gen_load_fpr32h(ctx
, fp1
, ft
);
11263 gen_store_fpr32(ctx
, fp1
, fd
);
11264 gen_store_fpr32h(ctx
, fp0
, fd
);
11265 tcg_temp_free_i32(fp0
);
11266 tcg_temp_free_i32(fp1
);
11272 TCGv_i32 fp0
= tcg_temp_new_i32();
11273 TCGv_i32 fp1
= tcg_temp_new_i32();
11275 gen_load_fpr32h(ctx
, fp0
, fs
);
11276 gen_load_fpr32(ctx
, fp1
, ft
);
11277 gen_store_fpr32(ctx
, fp1
, fd
);
11278 gen_store_fpr32h(ctx
, fp0
, fd
);
11279 tcg_temp_free_i32(fp0
);
11280 tcg_temp_free_i32(fp1
);
11286 TCGv_i32 fp0
= tcg_temp_new_i32();
11287 TCGv_i32 fp1
= tcg_temp_new_i32();
11289 gen_load_fpr32h(ctx
, fp0
, fs
);
11290 gen_load_fpr32h(ctx
, fp1
, ft
);
11291 gen_store_fpr32(ctx
, fp1
, fd
);
11292 gen_store_fpr32h(ctx
, fp0
, fd
);
11293 tcg_temp_free_i32(fp0
);
11294 tcg_temp_free_i32(fp1
);
11298 case OPC_CMP_UN_PS
:
11299 case OPC_CMP_EQ_PS
:
11300 case OPC_CMP_UEQ_PS
:
11301 case OPC_CMP_OLT_PS
:
11302 case OPC_CMP_ULT_PS
:
11303 case OPC_CMP_OLE_PS
:
11304 case OPC_CMP_ULE_PS
:
11305 case OPC_CMP_SF_PS
:
11306 case OPC_CMP_NGLE_PS
:
11307 case OPC_CMP_SEQ_PS
:
11308 case OPC_CMP_NGL_PS
:
11309 case OPC_CMP_LT_PS
:
11310 case OPC_CMP_NGE_PS
:
11311 case OPC_CMP_LE_PS
:
11312 case OPC_CMP_NGT_PS
:
11313 if (ctx
->opcode
& (1 << 6)) {
11314 gen_cmpabs_ps(ctx
, func
- 48, ft
, fs
, cc
);
11316 gen_cmp_ps(ctx
, func
- 48, ft
, fs
, cc
);
11320 MIPS_INVAL("farith");
11321 gen_reserved_instruction(ctx
);
11326 /* Coprocessor 3 (FPU) */
11327 static void gen_flt3_ldst(DisasContext
*ctx
, uint32_t opc
,
11328 int fd
, int fs
, int base
, int index
)
11330 TCGv t0
= tcg_temp_new();
11333 gen_load_gpr(t0
, index
);
11334 } else if (index
== 0) {
11335 gen_load_gpr(t0
, base
);
11337 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
11340 * Don't do NOP if destination is zero: we must perform the actual
11347 TCGv_i32 fp0
= tcg_temp_new_i32();
11349 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
11350 tcg_gen_trunc_tl_i32(fp0
, t0
);
11351 gen_store_fpr32(ctx
, fp0
, fd
);
11352 tcg_temp_free_i32(fp0
);
11357 check_cp1_registers(ctx
, fd
);
11359 TCGv_i64 fp0
= tcg_temp_new_i64();
11360 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11361 gen_store_fpr64(ctx
, fp0
, fd
);
11362 tcg_temp_free_i64(fp0
);
11366 check_cp1_64bitmode(ctx
);
11367 tcg_gen_andi_tl(t0
, t0
, ~0x7);
11369 TCGv_i64 fp0
= tcg_temp_new_i64();
11371 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11372 gen_store_fpr64(ctx
, fp0
, fd
);
11373 tcg_temp_free_i64(fp0
);
11379 TCGv_i32 fp0
= tcg_temp_new_i32();
11380 gen_load_fpr32(ctx
, fp0
, fs
);
11381 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
11382 tcg_temp_free_i32(fp0
);
11387 check_cp1_registers(ctx
, fs
);
11389 TCGv_i64 fp0
= tcg_temp_new_i64();
11390 gen_load_fpr64(ctx
, fp0
, fs
);
11391 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11392 tcg_temp_free_i64(fp0
);
11396 check_cp1_64bitmode(ctx
);
11397 tcg_gen_andi_tl(t0
, t0
, ~0x7);
11399 TCGv_i64 fp0
= tcg_temp_new_i64();
11400 gen_load_fpr64(ctx
, fp0
, fs
);
11401 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11402 tcg_temp_free_i64(fp0
);
11409 static void gen_flt3_arith(DisasContext
*ctx
, uint32_t opc
,
11410 int fd
, int fr
, int fs
, int ft
)
11416 TCGv t0
= tcg_temp_local_new();
11417 TCGv_i32 fp
= tcg_temp_new_i32();
11418 TCGv_i32 fph
= tcg_temp_new_i32();
11419 TCGLabel
*l1
= gen_new_label();
11420 TCGLabel
*l2
= gen_new_label();
11422 gen_load_gpr(t0
, fr
);
11423 tcg_gen_andi_tl(t0
, t0
, 0x7);
11425 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
11426 gen_load_fpr32(ctx
, fp
, fs
);
11427 gen_load_fpr32h(ctx
, fph
, fs
);
11428 gen_store_fpr32(ctx
, fp
, fd
);
11429 gen_store_fpr32h(ctx
, fph
, fd
);
11432 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
11434 if (cpu_is_bigendian(ctx
)) {
11435 gen_load_fpr32(ctx
, fp
, fs
);
11436 gen_load_fpr32h(ctx
, fph
, ft
);
11437 gen_store_fpr32h(ctx
, fp
, fd
);
11438 gen_store_fpr32(ctx
, fph
, fd
);
11440 gen_load_fpr32h(ctx
, fph
, fs
);
11441 gen_load_fpr32(ctx
, fp
, ft
);
11442 gen_store_fpr32(ctx
, fph
, fd
);
11443 gen_store_fpr32h(ctx
, fp
, fd
);
11446 tcg_temp_free_i32(fp
);
11447 tcg_temp_free_i32(fph
);
11453 TCGv_i32 fp0
= tcg_temp_new_i32();
11454 TCGv_i32 fp1
= tcg_temp_new_i32();
11455 TCGv_i32 fp2
= tcg_temp_new_i32();
11457 gen_load_fpr32(ctx
, fp0
, fs
);
11458 gen_load_fpr32(ctx
, fp1
, ft
);
11459 gen_load_fpr32(ctx
, fp2
, fr
);
11460 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11461 tcg_temp_free_i32(fp0
);
11462 tcg_temp_free_i32(fp1
);
11463 gen_store_fpr32(ctx
, fp2
, fd
);
11464 tcg_temp_free_i32(fp2
);
11469 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11471 TCGv_i64 fp0
= tcg_temp_new_i64();
11472 TCGv_i64 fp1
= tcg_temp_new_i64();
11473 TCGv_i64 fp2
= tcg_temp_new_i64();
11475 gen_load_fpr64(ctx
, fp0
, fs
);
11476 gen_load_fpr64(ctx
, fp1
, ft
);
11477 gen_load_fpr64(ctx
, fp2
, fr
);
11478 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11479 tcg_temp_free_i64(fp0
);
11480 tcg_temp_free_i64(fp1
);
11481 gen_store_fpr64(ctx
, fp2
, fd
);
11482 tcg_temp_free_i64(fp2
);
11488 TCGv_i64 fp0
= tcg_temp_new_i64();
11489 TCGv_i64 fp1
= tcg_temp_new_i64();
11490 TCGv_i64 fp2
= tcg_temp_new_i64();
11492 gen_load_fpr64(ctx
, fp0
, fs
);
11493 gen_load_fpr64(ctx
, fp1
, ft
);
11494 gen_load_fpr64(ctx
, fp2
, fr
);
11495 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11496 tcg_temp_free_i64(fp0
);
11497 tcg_temp_free_i64(fp1
);
11498 gen_store_fpr64(ctx
, fp2
, fd
);
11499 tcg_temp_free_i64(fp2
);
11505 TCGv_i32 fp0
= tcg_temp_new_i32();
11506 TCGv_i32 fp1
= tcg_temp_new_i32();
11507 TCGv_i32 fp2
= tcg_temp_new_i32();
11509 gen_load_fpr32(ctx
, fp0
, fs
);
11510 gen_load_fpr32(ctx
, fp1
, ft
);
11511 gen_load_fpr32(ctx
, fp2
, fr
);
11512 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11513 tcg_temp_free_i32(fp0
);
11514 tcg_temp_free_i32(fp1
);
11515 gen_store_fpr32(ctx
, fp2
, fd
);
11516 tcg_temp_free_i32(fp2
);
11521 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11523 TCGv_i64 fp0
= tcg_temp_new_i64();
11524 TCGv_i64 fp1
= tcg_temp_new_i64();
11525 TCGv_i64 fp2
= tcg_temp_new_i64();
11527 gen_load_fpr64(ctx
, fp0
, fs
);
11528 gen_load_fpr64(ctx
, fp1
, ft
);
11529 gen_load_fpr64(ctx
, fp2
, fr
);
11530 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11531 tcg_temp_free_i64(fp0
);
11532 tcg_temp_free_i64(fp1
);
11533 gen_store_fpr64(ctx
, fp2
, fd
);
11534 tcg_temp_free_i64(fp2
);
11540 TCGv_i64 fp0
= tcg_temp_new_i64();
11541 TCGv_i64 fp1
= tcg_temp_new_i64();
11542 TCGv_i64 fp2
= tcg_temp_new_i64();
11544 gen_load_fpr64(ctx
, fp0
, fs
);
11545 gen_load_fpr64(ctx
, fp1
, ft
);
11546 gen_load_fpr64(ctx
, fp2
, fr
);
11547 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11548 tcg_temp_free_i64(fp0
);
11549 tcg_temp_free_i64(fp1
);
11550 gen_store_fpr64(ctx
, fp2
, fd
);
11551 tcg_temp_free_i64(fp2
);
11557 TCGv_i32 fp0
= tcg_temp_new_i32();
11558 TCGv_i32 fp1
= tcg_temp_new_i32();
11559 TCGv_i32 fp2
= tcg_temp_new_i32();
11561 gen_load_fpr32(ctx
, fp0
, fs
);
11562 gen_load_fpr32(ctx
, fp1
, ft
);
11563 gen_load_fpr32(ctx
, fp2
, fr
);
11564 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11565 tcg_temp_free_i32(fp0
);
11566 tcg_temp_free_i32(fp1
);
11567 gen_store_fpr32(ctx
, fp2
, fd
);
11568 tcg_temp_free_i32(fp2
);
11573 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11575 TCGv_i64 fp0
= tcg_temp_new_i64();
11576 TCGv_i64 fp1
= tcg_temp_new_i64();
11577 TCGv_i64 fp2
= tcg_temp_new_i64();
11579 gen_load_fpr64(ctx
, fp0
, fs
);
11580 gen_load_fpr64(ctx
, fp1
, ft
);
11581 gen_load_fpr64(ctx
, fp2
, fr
);
11582 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11583 tcg_temp_free_i64(fp0
);
11584 tcg_temp_free_i64(fp1
);
11585 gen_store_fpr64(ctx
, fp2
, fd
);
11586 tcg_temp_free_i64(fp2
);
11592 TCGv_i64 fp0
= tcg_temp_new_i64();
11593 TCGv_i64 fp1
= tcg_temp_new_i64();
11594 TCGv_i64 fp2
= tcg_temp_new_i64();
11596 gen_load_fpr64(ctx
, fp0
, fs
);
11597 gen_load_fpr64(ctx
, fp1
, ft
);
11598 gen_load_fpr64(ctx
, fp2
, fr
);
11599 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11600 tcg_temp_free_i64(fp0
);
11601 tcg_temp_free_i64(fp1
);
11602 gen_store_fpr64(ctx
, fp2
, fd
);
11603 tcg_temp_free_i64(fp2
);
11609 TCGv_i32 fp0
= tcg_temp_new_i32();
11610 TCGv_i32 fp1
= tcg_temp_new_i32();
11611 TCGv_i32 fp2
= tcg_temp_new_i32();
11613 gen_load_fpr32(ctx
, fp0
, fs
);
11614 gen_load_fpr32(ctx
, fp1
, ft
);
11615 gen_load_fpr32(ctx
, fp2
, fr
);
11616 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11617 tcg_temp_free_i32(fp0
);
11618 tcg_temp_free_i32(fp1
);
11619 gen_store_fpr32(ctx
, fp2
, fd
);
11620 tcg_temp_free_i32(fp2
);
11625 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11627 TCGv_i64 fp0
= tcg_temp_new_i64();
11628 TCGv_i64 fp1
= tcg_temp_new_i64();
11629 TCGv_i64 fp2
= tcg_temp_new_i64();
11631 gen_load_fpr64(ctx
, fp0
, fs
);
11632 gen_load_fpr64(ctx
, fp1
, ft
);
11633 gen_load_fpr64(ctx
, fp2
, fr
);
11634 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11635 tcg_temp_free_i64(fp0
);
11636 tcg_temp_free_i64(fp1
);
11637 gen_store_fpr64(ctx
, fp2
, fd
);
11638 tcg_temp_free_i64(fp2
);
11644 TCGv_i64 fp0
= tcg_temp_new_i64();
11645 TCGv_i64 fp1
= tcg_temp_new_i64();
11646 TCGv_i64 fp2
= tcg_temp_new_i64();
11648 gen_load_fpr64(ctx
, fp0
, fs
);
11649 gen_load_fpr64(ctx
, fp1
, ft
);
11650 gen_load_fpr64(ctx
, fp2
, fr
);
11651 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11652 tcg_temp_free_i64(fp0
);
11653 tcg_temp_free_i64(fp1
);
11654 gen_store_fpr64(ctx
, fp2
, fd
);
11655 tcg_temp_free_i64(fp2
);
11659 MIPS_INVAL("flt3_arith");
11660 gen_reserved_instruction(ctx
);
11665 void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
11669 #if !defined(CONFIG_USER_ONLY)
11671 * The Linux kernel will emulate rdhwr if it's not supported natively.
11672 * Therefore only check the ISA in system mode.
11674 check_insn(ctx
, ISA_MIPS_R2
);
11676 t0
= tcg_temp_new();
11680 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
11681 gen_store_gpr(t0
, rt
);
11684 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
11685 gen_store_gpr(t0
, rt
);
11688 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
11691 gen_helper_rdhwr_cc(t0
, cpu_env
);
11692 gen_store_gpr(t0
, rt
);
11694 * Break the TB to be able to take timer interrupts immediately
11695 * after reading count. DISAS_STOP isn't sufficient, we need to ensure
11696 * we break completely out of translated code.
11698 gen_save_pc(ctx
->base
.pc_next
+ 4);
11699 ctx
->base
.is_jmp
= DISAS_EXIT
;
11702 gen_helper_rdhwr_ccres(t0
, cpu_env
);
11703 gen_store_gpr(t0
, rt
);
11706 check_insn(ctx
, ISA_MIPS_R6
);
11709 * Performance counter registers are not implemented other than
11710 * control register 0.
11712 generate_exception(ctx
, EXCP_RI
);
11714 gen_helper_rdhwr_performance(t0
, cpu_env
);
11715 gen_store_gpr(t0
, rt
);
11718 check_insn(ctx
, ISA_MIPS_R6
);
11719 gen_helper_rdhwr_xnp(t0
, cpu_env
);
11720 gen_store_gpr(t0
, rt
);
11723 #if defined(CONFIG_USER_ONLY)
11724 tcg_gen_ld_tl(t0
, cpu_env
,
11725 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
11726 gen_store_gpr(t0
, rt
);
11729 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
11730 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
11731 tcg_gen_ld_tl(t0
, cpu_env
,
11732 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
11733 gen_store_gpr(t0
, rt
);
11735 gen_reserved_instruction(ctx
);
11739 default: /* Invalid */
11740 MIPS_INVAL("rdhwr");
11741 gen_reserved_instruction(ctx
);
11747 static inline void clear_branch_hflags(DisasContext
*ctx
)
11749 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
11750 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
11751 save_cpu_state(ctx
, 0);
11754 * It is not safe to save ctx->hflags as hflags may be changed
11755 * in execution time by the instruction in delay / forbidden slot.
11757 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
11761 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
11763 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11764 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
11765 /* Branches completion */
11766 clear_branch_hflags(ctx
);
11767 ctx
->base
.is_jmp
= DISAS_NORETURN
;
11768 /* FIXME: Need to clear can_do_io. */
11769 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
11770 case MIPS_HFLAG_FBNSLOT
:
11771 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
11774 /* unconditional branch */
11775 if (proc_hflags
& MIPS_HFLAG_BX
) {
11776 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
11778 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11780 case MIPS_HFLAG_BL
:
11781 /* blikely taken case */
11782 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11784 case MIPS_HFLAG_BC
:
11785 /* Conditional branch */
11787 TCGLabel
*l1
= gen_new_label();
11789 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
11790 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
11792 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11795 case MIPS_HFLAG_BR
:
11796 /* unconditional branch to register */
11797 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
11798 TCGv t0
= tcg_temp_new();
11799 TCGv_i32 t1
= tcg_temp_new_i32();
11801 tcg_gen_andi_tl(t0
, btarget
, 0x1);
11802 tcg_gen_trunc_tl_i32(t1
, t0
);
11804 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
11805 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
11806 tcg_gen_or_i32(hflags
, hflags
, t1
);
11807 tcg_temp_free_i32(t1
);
11809 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
11811 tcg_gen_mov_tl(cpu_PC
, btarget
);
11813 tcg_gen_lookup_and_goto_ptr();
11816 LOG_DISAS("unknown branch 0x%x\n", proc_hflags
);
11817 gen_reserved_instruction(ctx
);
11822 /* Compact Branches */
11823 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
11824 int rs
, int rt
, int32_t offset
)
11826 int bcond_compute
= 0;
11827 TCGv t0
= tcg_temp_new();
11828 TCGv t1
= tcg_temp_new();
11829 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
11831 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11832 #ifdef MIPS_DEBUG_DISAS
11833 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
11834 "\n", ctx
->base
.pc_next
);
11836 gen_reserved_instruction(ctx
);
11840 /* Load needed operands and calculate btarget */
11842 /* compact branch */
11843 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11844 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11845 gen_load_gpr(t0
, rs
);
11846 gen_load_gpr(t1
, rt
);
11848 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11849 if (rs
<= rt
&& rs
== 0) {
11850 /* OPC_BEQZALC, OPC_BNEZALC */
11851 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11854 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11855 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11856 gen_load_gpr(t0
, rs
);
11857 gen_load_gpr(t1
, rt
);
11859 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11861 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11862 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11863 if (rs
== 0 || rs
== rt
) {
11864 /* OPC_BLEZALC, OPC_BGEZALC */
11865 /* OPC_BGTZALC, OPC_BLTZALC */
11866 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11868 gen_load_gpr(t0
, rs
);
11869 gen_load_gpr(t1
, rt
);
11871 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11875 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11880 /* OPC_BEQZC, OPC_BNEZC */
11881 gen_load_gpr(t0
, rs
);
11883 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11885 /* OPC_JIC, OPC_JIALC */
11886 TCGv tbase
= tcg_temp_new();
11887 TCGv toffset
= tcg_constant_tl(offset
);
11889 gen_load_gpr(tbase
, rt
);
11890 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
11891 tcg_temp_free(tbase
);
11895 MIPS_INVAL("Compact branch/jump");
11896 gen_reserved_instruction(ctx
);
11900 if (bcond_compute
== 0) {
11901 /* Unconditional compact branch */
11904 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11907 ctx
->hflags
|= MIPS_HFLAG_BR
;
11910 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11913 ctx
->hflags
|= MIPS_HFLAG_B
;
11916 MIPS_INVAL("Compact branch/jump");
11917 gen_reserved_instruction(ctx
);
11921 /* Generating branch here as compact branches don't have delay slot */
11922 gen_branch(ctx
, 4);
11924 /* Conditional compact branch */
11925 TCGLabel
*fs
= gen_new_label();
11926 save_cpu_state(ctx
, 0);
11929 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11930 if (rs
== 0 && rt
!= 0) {
11932 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11933 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11935 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11938 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
11941 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11942 if (rs
== 0 && rt
!= 0) {
11944 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11945 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11947 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11950 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
11953 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11954 if (rs
== 0 && rt
!= 0) {
11956 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11957 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11959 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11962 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
11965 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11966 if (rs
== 0 && rt
!= 0) {
11968 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11969 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11971 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11974 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
11977 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11978 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11980 /* OPC_BOVC, OPC_BNVC */
11981 TCGv t2
= tcg_temp_new();
11982 TCGv t3
= tcg_temp_new();
11983 TCGv t4
= tcg_temp_new();
11984 TCGv input_overflow
= tcg_temp_new();
11986 gen_load_gpr(t0
, rs
);
11987 gen_load_gpr(t1
, rt
);
11988 tcg_gen_ext32s_tl(t2
, t0
);
11989 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
11990 tcg_gen_ext32s_tl(t3
, t1
);
11991 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
11992 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
11994 tcg_gen_add_tl(t4
, t2
, t3
);
11995 tcg_gen_ext32s_tl(t4
, t4
);
11996 tcg_gen_xor_tl(t2
, t2
, t3
);
11997 tcg_gen_xor_tl(t3
, t4
, t3
);
11998 tcg_gen_andc_tl(t2
, t3
, t2
);
11999 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
12000 tcg_gen_or_tl(t4
, t4
, input_overflow
);
12001 if (opc
== OPC_BOVC
) {
12003 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12006 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12008 tcg_temp_free(input_overflow
);
12012 } else if (rs
< rt
&& rs
== 0) {
12013 /* OPC_BEQZALC, OPC_BNEZALC */
12014 if (opc
== OPC_BEQZALC
) {
12016 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12019 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12022 /* OPC_BEQC, OPC_BNEC */
12023 if (opc
== OPC_BEQC
) {
12025 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12028 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12033 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12036 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12039 MIPS_INVAL("Compact conditional branch/jump");
12040 gen_reserved_instruction(ctx
);
12044 /* Generating branch here as compact branches don't have delay slot */
12045 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12048 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12056 void gen_addiupc(DisasContext
*ctx
, int rx
, int imm
,
12057 int is_64_bit
, int extended
)
12061 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
12062 gen_reserved_instruction(ctx
);
12066 t0
= tcg_temp_new();
12068 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
12069 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
12071 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12077 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
12080 TCGv_i32 t0
= tcg_const_i32(op
);
12081 TCGv t1
= tcg_temp_new();
12082 gen_base_offset_addr(ctx
, t1
, base
, offset
);
12083 gen_helper_cache(cpu_env
, t1
, t0
);
12085 tcg_temp_free_i32(t0
);
/*
 * Return true if a SDBBP instruction with the given code should be
 * treated as a Unified Hosting Interface (UHI) semihosting call.
 * Semihosting is never taken in user-only mode.
 */
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    /* UHI uses SDBBP with code 1 as its entry point. */
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
#ifdef CONFIG_USER_ONLY
/*
 * is_uhi() is constant-false for user-only, so the compiler should
 * dead-code away any calls to this stub; reaching it is a bug.
 */
static inline void gen_helper_do_semihosting(void *env)
{
    g_assert_not_reached();
}
#endif
12105 void gen_ldxs(DisasContext
*ctx
, int base
, int index
, int rd
)
12107 TCGv t0
= tcg_temp_new();
12108 TCGv t1
= tcg_temp_new();
12110 gen_load_gpr(t0
, base
);
12113 gen_load_gpr(t1
, index
);
12114 tcg_gen_shli_tl(t1
, t1
, 2);
12115 gen_op_addr_add(ctx
, t0
, t1
, t0
);
12118 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12119 gen_store_gpr(t1
, rd
);
12125 static void gen_sync(int stype
)
12127 TCGBar tcg_mo
= TCG_BAR_SC
;
12130 case 0x4: /* SYNC_WMB */
12131 tcg_mo
|= TCG_MO_ST_ST
;
12133 case 0x10: /* SYNC_MB */
12134 tcg_mo
|= TCG_MO_ALL
;
12136 case 0x11: /* SYNC_ACQUIRE */
12137 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
12139 case 0x12: /* SYNC_RELEASE */
12140 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
12142 case 0x13: /* SYNC_RMB */
12143 tcg_mo
|= TCG_MO_LD_LD
;
12146 tcg_mo
|= TCG_MO_ALL
;
12150 tcg_gen_mb(tcg_mo
);
12153 /* ISA extensions (ASEs) */
12155 /* MIPS16 extension to MIPS32 */
12156 #include "mips16e_translate.c.inc"
12158 /* microMIPS extension to MIPS32/MIPS64 */
12161 * Values for microMIPS fmt field. Variable-width, depending on which
12162 * formats the instruction supports.
12181 #include "micromips_translate.c.inc"
12183 #include "nanomips_translate.c.inc"
12185 /* MIPSDSP functions. */
12186 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
12187 int rd
, int base
, int offset
)
12192 t0
= tcg_temp_new();
12195 gen_load_gpr(t0
, offset
);
12196 } else if (offset
== 0) {
12197 gen_load_gpr(t0
, base
);
12199 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
12204 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
12205 gen_store_gpr(t0
, rd
);
12208 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
12209 gen_store_gpr(t0
, rd
);
12212 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12213 gen_store_gpr(t0
, rd
);
12215 #if defined(TARGET_MIPS64)
12217 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
12218 gen_store_gpr(t0
, rd
);
12225 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
12226 int ret
, int v1
, int v2
)
12232 /* Treat as NOP. */
12236 v1_t
= tcg_temp_new();
12237 v2_t
= tcg_temp_new();
12239 gen_load_gpr(v1_t
, v1
);
12240 gen_load_gpr(v2_t
, v2
);
12243 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
12244 case OPC_MULT_G_2E
:
12248 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12250 case OPC_ADDUH_R_QB
:
12251 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12254 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12256 case OPC_ADDQH_R_PH
:
12257 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12260 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12262 case OPC_ADDQH_R_W
:
12263 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12266 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12268 case OPC_SUBUH_R_QB
:
12269 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12272 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12274 case OPC_SUBQH_R_PH
:
12275 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12278 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12280 case OPC_SUBQH_R_W
:
12281 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12285 case OPC_ABSQ_S_PH_DSP
:
12287 case OPC_ABSQ_S_QB
:
12289 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
12291 case OPC_ABSQ_S_PH
:
12293 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
12297 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
12299 case OPC_PRECEQ_W_PHL
:
12301 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
12302 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
12304 case OPC_PRECEQ_W_PHR
:
12306 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
12307 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
12308 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
12310 case OPC_PRECEQU_PH_QBL
:
12312 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
12314 case OPC_PRECEQU_PH_QBR
:
12316 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
12318 case OPC_PRECEQU_PH_QBLA
:
12320 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
12322 case OPC_PRECEQU_PH_QBRA
:
12324 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
12326 case OPC_PRECEU_PH_QBL
:
12328 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
12330 case OPC_PRECEU_PH_QBR
:
12332 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
12334 case OPC_PRECEU_PH_QBLA
:
12336 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
12338 case OPC_PRECEU_PH_QBRA
:
12340 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
12344 case OPC_ADDU_QB_DSP
:
12348 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12350 case OPC_ADDQ_S_PH
:
12352 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12356 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12360 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12362 case OPC_ADDU_S_QB
:
12364 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12368 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12370 case OPC_ADDU_S_PH
:
12372 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12376 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12378 case OPC_SUBQ_S_PH
:
12380 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12384 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12388 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12390 case OPC_SUBU_S_QB
:
12392 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12396 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12398 case OPC_SUBU_S_PH
:
12400 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12404 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12408 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12412 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
12414 case OPC_RADDU_W_QB
:
12416 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
12420 case OPC_CMPU_EQ_QB_DSP
:
12422 case OPC_PRECR_QB_PH
:
12424 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12426 case OPC_PRECRQ_QB_PH
:
12428 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12430 case OPC_PRECR_SRA_PH_W
:
12433 TCGv_i32 sa_t
= tcg_const_i32(v2
);
12434 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
12436 tcg_temp_free_i32(sa_t
);
12439 case OPC_PRECR_SRA_R_PH_W
:
12442 TCGv_i32 sa_t
= tcg_const_i32(v2
);
12443 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
12445 tcg_temp_free_i32(sa_t
);
12448 case OPC_PRECRQ_PH_W
:
12450 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12452 case OPC_PRECRQ_RS_PH_W
:
12454 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12456 case OPC_PRECRQU_S_QB_PH
:
12458 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12462 #ifdef TARGET_MIPS64
12463 case OPC_ABSQ_S_QH_DSP
:
12465 case OPC_PRECEQ_L_PWL
:
12467 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
12469 case OPC_PRECEQ_L_PWR
:
12471 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
12473 case OPC_PRECEQ_PW_QHL
:
12475 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
12477 case OPC_PRECEQ_PW_QHR
:
12479 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
12481 case OPC_PRECEQ_PW_QHLA
:
12483 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
12485 case OPC_PRECEQ_PW_QHRA
:
12487 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
12489 case OPC_PRECEQU_QH_OBL
:
12491 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
12493 case OPC_PRECEQU_QH_OBR
:
12495 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
12497 case OPC_PRECEQU_QH_OBLA
:
12499 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
12501 case OPC_PRECEQU_QH_OBRA
:
12503 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
12505 case OPC_PRECEU_QH_OBL
:
12507 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
12509 case OPC_PRECEU_QH_OBR
:
12511 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
12513 case OPC_PRECEU_QH_OBLA
:
12515 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
12517 case OPC_PRECEU_QH_OBRA
:
12519 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
12521 case OPC_ABSQ_S_OB
:
12523 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
12525 case OPC_ABSQ_S_PW
:
12527 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
12529 case OPC_ABSQ_S_QH
:
12531 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
12535 case OPC_ADDU_OB_DSP
:
12537 case OPC_RADDU_L_OB
:
12539 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
12543 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12545 case OPC_SUBQ_S_PW
:
12547 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12551 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12553 case OPC_SUBQ_S_QH
:
12555 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12559 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12561 case OPC_SUBU_S_OB
:
12563 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12567 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12569 case OPC_SUBU_S_QH
:
12571 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12575 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12577 case OPC_SUBUH_R_OB
:
12579 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12583 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12585 case OPC_ADDQ_S_PW
:
12587 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12591 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12593 case OPC_ADDQ_S_QH
:
12595 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12599 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12601 case OPC_ADDU_S_OB
:
12603 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12607 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12609 case OPC_ADDU_S_QH
:
12611 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12615 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12617 case OPC_ADDUH_R_OB
:
12619 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12623 case OPC_CMPU_EQ_OB_DSP
:
12625 case OPC_PRECR_OB_QH
:
12627 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
12629 case OPC_PRECR_SRA_QH_PW
:
12632 TCGv_i32 ret_t
= tcg_const_i32(ret
);
12633 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
12634 tcg_temp_free_i32(ret_t
);
12637 case OPC_PRECR_SRA_R_QH_PW
:
12640 TCGv_i32 sa_v
= tcg_const_i32(ret
);
12641 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
12642 tcg_temp_free_i32(sa_v
);
12645 case OPC_PRECRQ_OB_QH
:
12647 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
12649 case OPC_PRECRQ_PW_L
:
12651 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
12653 case OPC_PRECRQ_QH_PW
:
12655 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
12657 case OPC_PRECRQ_RS_QH_PW
:
12659 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12661 case OPC_PRECRQU_S_OB_QH
:
12663 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12670 tcg_temp_free(v1_t
);
12671 tcg_temp_free(v2_t
);
12674 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
12675 int ret
, int v1
, int v2
)
12683 /* Treat as NOP. */
12687 t0
= tcg_temp_new();
12688 v1_t
= tcg_temp_new();
12689 v2_t
= tcg_temp_new();
12691 tcg_gen_movi_tl(t0
, v1
);
12692 gen_load_gpr(v1_t
, v1
);
12693 gen_load_gpr(v2_t
, v2
);
12696 case OPC_SHLL_QB_DSP
:
12698 op2
= MASK_SHLL_QB(ctx
->opcode
);
12702 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12706 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12710 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12714 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12716 case OPC_SHLL_S_PH
:
12718 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12720 case OPC_SHLLV_S_PH
:
12722 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12726 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12728 case OPC_SHLLV_S_W
:
12730 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12734 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
12738 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12742 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
12746 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12750 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
12752 case OPC_SHRA_R_QB
:
12754 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
12758 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12760 case OPC_SHRAV_R_QB
:
12762 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12766 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
12768 case OPC_SHRA_R_PH
:
12770 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
12774 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12776 case OPC_SHRAV_R_PH
:
12778 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12782 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
12784 case OPC_SHRAV_R_W
:
12786 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12788 default: /* Invalid */
12789 MIPS_INVAL("MASK SHLL.QB");
12790 gen_reserved_instruction(ctx
);
12795 #ifdef TARGET_MIPS64
12796 case OPC_SHLL_OB_DSP
:
12797 op2
= MASK_SHLL_OB(ctx
->opcode
);
12801 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12805 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12807 case OPC_SHLL_S_PW
:
12809 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12811 case OPC_SHLLV_S_PW
:
12813 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12817 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12821 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12825 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12829 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12831 case OPC_SHLL_S_QH
:
12833 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12835 case OPC_SHLLV_S_QH
:
12837 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12841 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
12845 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12847 case OPC_SHRA_R_OB
:
12849 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
12851 case OPC_SHRAV_R_OB
:
12853 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12857 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
12861 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
12863 case OPC_SHRA_R_PW
:
12865 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
12867 case OPC_SHRAV_R_PW
:
12869 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
12873 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
12877 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12879 case OPC_SHRA_R_QH
:
12881 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
12883 case OPC_SHRAV_R_QH
:
12885 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12889 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
12893 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12897 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
12901 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12903 default: /* Invalid */
12904 MIPS_INVAL("MASK SHLL.OB");
12905 gen_reserved_instruction(ctx
);
12913 tcg_temp_free(v1_t
);
12914 tcg_temp_free(v2_t
);
12917 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
12918 int ret
, int v1
, int v2
, int check_ret
)
12924 if ((ret
== 0) && (check_ret
== 1)) {
12925 /* Treat as NOP. */
12929 t0
= tcg_temp_new_i32();
12930 v1_t
= tcg_temp_new();
12931 v2_t
= tcg_temp_new();
12933 tcg_gen_movi_i32(t0
, ret
);
12934 gen_load_gpr(v1_t
, v1
);
12935 gen_load_gpr(v2_t
, v2
);
12939 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
12940 * the same mask and op1.
12942 case OPC_MULT_G_2E
:
12946 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12949 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12952 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12954 case OPC_MULQ_RS_W
:
12955 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12959 case OPC_DPA_W_PH_DSP
:
12961 case OPC_DPAU_H_QBL
:
12963 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
12965 case OPC_DPAU_H_QBR
:
12967 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
12969 case OPC_DPSU_H_QBL
:
12971 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
12973 case OPC_DPSU_H_QBR
:
12975 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
12979 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12981 case OPC_DPAX_W_PH
:
12983 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12985 case OPC_DPAQ_S_W_PH
:
12987 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12989 case OPC_DPAQX_S_W_PH
:
12991 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12993 case OPC_DPAQX_SA_W_PH
:
12995 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12999 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13001 case OPC_DPSX_W_PH
:
13003 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13005 case OPC_DPSQ_S_W_PH
:
13007 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13009 case OPC_DPSQX_S_W_PH
:
13011 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13013 case OPC_DPSQX_SA_W_PH
:
13015 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13017 case OPC_MULSAQ_S_W_PH
:
13019 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13021 case OPC_DPAQ_SA_L_W
:
13023 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
13025 case OPC_DPSQ_SA_L_W
:
13027 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
13029 case OPC_MAQ_S_W_PHL
:
13031 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
13033 case OPC_MAQ_S_W_PHR
:
13035 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
13037 case OPC_MAQ_SA_W_PHL
:
13039 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
13041 case OPC_MAQ_SA_W_PHR
:
13043 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
13045 case OPC_MULSA_W_PH
:
13047 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13051 #ifdef TARGET_MIPS64
13052 case OPC_DPAQ_W_QH_DSP
:
13054 int ac
= ret
& 0x03;
13055 tcg_gen_movi_i32(t0
, ac
);
13060 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
13064 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
13068 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
13072 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
13076 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13078 case OPC_DPAQ_S_W_QH
:
13080 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13082 case OPC_DPAQ_SA_L_PW
:
13084 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13086 case OPC_DPAU_H_OBL
:
13088 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
13090 case OPC_DPAU_H_OBR
:
13092 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
13096 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13098 case OPC_DPSQ_S_W_QH
:
13100 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13102 case OPC_DPSQ_SA_L_PW
:
13104 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13106 case OPC_DPSU_H_OBL
:
13108 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
13110 case OPC_DPSU_H_OBR
:
13112 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
13114 case OPC_MAQ_S_L_PWL
:
13116 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
13118 case OPC_MAQ_S_L_PWR
:
13120 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
13122 case OPC_MAQ_S_W_QHLL
:
13124 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
13126 case OPC_MAQ_SA_W_QHLL
:
13128 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
13130 case OPC_MAQ_S_W_QHLR
:
13132 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
13134 case OPC_MAQ_SA_W_QHLR
:
13136 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
13138 case OPC_MAQ_S_W_QHRL
:
13140 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
13142 case OPC_MAQ_SA_W_QHRL
:
13144 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
13146 case OPC_MAQ_S_W_QHRR
:
13148 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
13150 case OPC_MAQ_SA_W_QHRR
:
13152 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
13154 case OPC_MULSAQ_S_L_PW
:
13156 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13158 case OPC_MULSAQ_S_W_QH
:
13160 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13166 case OPC_ADDU_QB_DSP
:
13168 case OPC_MULEU_S_PH_QBL
:
13170 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13172 case OPC_MULEU_S_PH_QBR
:
13174 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13176 case OPC_MULQ_RS_PH
:
13178 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13180 case OPC_MULEQ_S_W_PHL
:
13182 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13184 case OPC_MULEQ_S_W_PHR
:
13186 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13188 case OPC_MULQ_S_PH
:
13190 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13194 #ifdef TARGET_MIPS64
13195 case OPC_ADDU_OB_DSP
:
13197 case OPC_MULEQ_S_PW_QHL
:
13199 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13201 case OPC_MULEQ_S_PW_QHR
:
13203 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13205 case OPC_MULEU_S_QH_OBL
:
13207 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13209 case OPC_MULEU_S_QH_OBR
:
13211 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13213 case OPC_MULQ_RS_QH
:
13215 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13222 tcg_temp_free_i32(t0
);
13223 tcg_temp_free(v1_t
);
13224 tcg_temp_free(v2_t
);
13227 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
13235 /* Treat as NOP. */
13239 t0
= tcg_temp_new();
13240 val_t
= tcg_temp_new();
13241 gen_load_gpr(val_t
, val
);
13244 case OPC_ABSQ_S_PH_DSP
:
13248 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
13253 target_long result
;
13254 imm
= (ctx
->opcode
>> 16) & 0xFF;
13255 result
= (uint32_t)imm
<< 24 |
13256 (uint32_t)imm
<< 16 |
13257 (uint32_t)imm
<< 8 |
13259 result
= (int32_t)result
;
13260 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
13265 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
13266 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
13267 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13268 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13269 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13270 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
13275 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13276 imm
= (int16_t)(imm
<< 6) >> 6;
13277 tcg_gen_movi_tl(cpu_gpr
[ret
], \
13278 (target_long
)((int32_t)imm
<< 16 | \
13284 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
13285 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13286 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13287 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
13291 #ifdef TARGET_MIPS64
13292 case OPC_ABSQ_S_QH_DSP
:
13299 imm
= (ctx
->opcode
>> 16) & 0xFF;
13300 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
13301 temp
= (temp
<< 16) | temp
;
13302 temp
= (temp
<< 32) | temp
;
13303 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13311 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13312 imm
= (int16_t)(imm
<< 6) >> 6;
13313 temp
= ((target_long
)imm
<< 32) \
13314 | ((target_long
)imm
& 0xFFFFFFFF);
13315 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13323 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13324 imm
= (int16_t)(imm
<< 6) >> 6;
13326 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
13327 ((uint64_t)(uint16_t)imm
<< 32) |
13328 ((uint64_t)(uint16_t)imm
<< 16) |
13329 (uint64_t)(uint16_t)imm
;
13330 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13335 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
13336 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
13337 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13338 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13339 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13340 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13341 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13345 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
13346 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13347 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13351 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
13352 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13353 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13354 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13355 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13362 tcg_temp_free(val_t
);
13365 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
13366 uint32_t op1
, uint32_t op2
,
13367 int ret
, int v1
, int v2
, int check_ret
)
13373 if ((ret
== 0) && (check_ret
== 1)) {
13374 /* Treat as NOP. */
13378 t1
= tcg_temp_new();
13379 v1_t
= tcg_temp_new();
13380 v2_t
= tcg_temp_new();
13382 gen_load_gpr(v1_t
, v1
);
13383 gen_load_gpr(v2_t
, v2
);
13386 case OPC_CMPU_EQ_QB_DSP
:
13388 case OPC_CMPU_EQ_QB
:
13390 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
13392 case OPC_CMPU_LT_QB
:
13394 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
13396 case OPC_CMPU_LE_QB
:
13398 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
13400 case OPC_CMPGU_EQ_QB
:
13402 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13404 case OPC_CMPGU_LT_QB
:
13406 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13408 case OPC_CMPGU_LE_QB
:
13410 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13412 case OPC_CMPGDU_EQ_QB
:
13414 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
13415 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13416 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13417 tcg_gen_shli_tl(t1
, t1
, 24);
13418 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13420 case OPC_CMPGDU_LT_QB
:
13422 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
13423 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13424 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13425 tcg_gen_shli_tl(t1
, t1
, 24);
13426 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13428 case OPC_CMPGDU_LE_QB
:
13430 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
13431 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13432 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13433 tcg_gen_shli_tl(t1
, t1
, 24);
13434 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13436 case OPC_CMP_EQ_PH
:
13438 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
13440 case OPC_CMP_LT_PH
:
13442 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
13444 case OPC_CMP_LE_PH
:
13446 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
13450 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13454 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13456 case OPC_PACKRL_PH
:
13458 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
13462 #ifdef TARGET_MIPS64
13463 case OPC_CMPU_EQ_OB_DSP
:
13465 case OPC_CMP_EQ_PW
:
13467 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
13469 case OPC_CMP_LT_PW
:
13471 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
13473 case OPC_CMP_LE_PW
:
13475 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
13477 case OPC_CMP_EQ_QH
:
13479 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
13481 case OPC_CMP_LT_QH
:
13483 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
13485 case OPC_CMP_LE_QH
:
13487 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
13489 case OPC_CMPGDU_EQ_OB
:
13491 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13493 case OPC_CMPGDU_LT_OB
:
13495 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13497 case OPC_CMPGDU_LE_OB
:
13499 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13501 case OPC_CMPGU_EQ_OB
:
13503 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13505 case OPC_CMPGU_LT_OB
:
13507 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13509 case OPC_CMPGU_LE_OB
:
13511 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13513 case OPC_CMPU_EQ_OB
:
13515 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
13517 case OPC_CMPU_LT_OB
:
13519 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
13521 case OPC_CMPU_LE_OB
:
13523 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
13525 case OPC_PACKRL_PW
:
13527 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
13531 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13535 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13539 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13547 tcg_temp_free(v1_t
);
13548 tcg_temp_free(v2_t
);
13551 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
13552 uint32_t op1
, int rt
, int rs
, int sa
)
13559 /* Treat as NOP. */
13563 t0
= tcg_temp_new();
13564 gen_load_gpr(t0
, rs
);
13567 case OPC_APPEND_DSP
:
13568 switch (MASK_APPEND(ctx
->opcode
)) {
13571 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
13573 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13577 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13578 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
13579 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
13580 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13582 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13586 if (sa
!= 0 && sa
!= 2) {
13587 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
13588 tcg_gen_ext32u_tl(t0
, t0
);
13589 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
13590 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13592 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13594 default: /* Invalid */
13595 MIPS_INVAL("MASK APPEND");
13596 gen_reserved_instruction(ctx
);
13600 #ifdef TARGET_MIPS64
13601 case OPC_DAPPEND_DSP
:
13602 switch (MASK_DAPPEND(ctx
->opcode
)) {
13605 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
13609 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
13610 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
13611 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
13615 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
13616 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
13617 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13622 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
13623 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
13624 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
13625 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13628 default: /* Invalid */
13629 MIPS_INVAL("MASK DAPPEND");
13630 gen_reserved_instruction(ctx
);
13639 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
13640 int ret
, int v1
, int v2
, int check_ret
)
13648 if ((ret
== 0) && (check_ret
== 1)) {
13649 /* Treat as NOP. */
13653 t0
= tcg_temp_new();
13654 t1
= tcg_temp_new();
13655 v1_t
= tcg_temp_new();
13657 gen_load_gpr(v1_t
, v1
);
13660 case OPC_EXTR_W_DSP
:
13664 tcg_gen_movi_tl(t0
, v2
);
13665 tcg_gen_movi_tl(t1
, v1
);
13666 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13669 tcg_gen_movi_tl(t0
, v2
);
13670 tcg_gen_movi_tl(t1
, v1
);
13671 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13673 case OPC_EXTR_RS_W
:
13674 tcg_gen_movi_tl(t0
, v2
);
13675 tcg_gen_movi_tl(t1
, v1
);
13676 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13679 tcg_gen_movi_tl(t0
, v2
);
13680 tcg_gen_movi_tl(t1
, v1
);
13681 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13683 case OPC_EXTRV_S_H
:
13684 tcg_gen_movi_tl(t0
, v2
);
13685 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13688 tcg_gen_movi_tl(t0
, v2
);
13689 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13691 case OPC_EXTRV_R_W
:
13692 tcg_gen_movi_tl(t0
, v2
);
13693 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13695 case OPC_EXTRV_RS_W
:
13696 tcg_gen_movi_tl(t0
, v2
);
13697 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13700 tcg_gen_movi_tl(t0
, v2
);
13701 tcg_gen_movi_tl(t1
, v1
);
13702 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13705 tcg_gen_movi_tl(t0
, v2
);
13706 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13709 tcg_gen_movi_tl(t0
, v2
);
13710 tcg_gen_movi_tl(t1
, v1
);
13711 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13714 tcg_gen_movi_tl(t0
, v2
);
13715 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13718 imm
= (ctx
->opcode
>> 20) & 0x3F;
13719 tcg_gen_movi_tl(t0
, ret
);
13720 tcg_gen_movi_tl(t1
, imm
);
13721 gen_helper_shilo(t0
, t1
, cpu_env
);
13724 tcg_gen_movi_tl(t0
, ret
);
13725 gen_helper_shilo(t0
, v1_t
, cpu_env
);
13728 tcg_gen_movi_tl(t0
, ret
);
13729 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
13732 imm
= (ctx
->opcode
>> 11) & 0x3FF;
13733 tcg_gen_movi_tl(t0
, imm
);
13734 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
13737 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13738 tcg_gen_movi_tl(t0
, imm
);
13739 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
13743 #ifdef TARGET_MIPS64
13744 case OPC_DEXTR_W_DSP
:
13748 tcg_gen_movi_tl(t0
, ret
);
13749 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
13753 int shift
= (ctx
->opcode
>> 19) & 0x7F;
13754 int ac
= (ctx
->opcode
>> 11) & 0x03;
13755 tcg_gen_movi_tl(t0
, shift
);
13756 tcg_gen_movi_tl(t1
, ac
);
13757 gen_helper_dshilo(t0
, t1
, cpu_env
);
13762 int ac
= (ctx
->opcode
>> 11) & 0x03;
13763 tcg_gen_movi_tl(t0
, ac
);
13764 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
13768 tcg_gen_movi_tl(t0
, v2
);
13769 tcg_gen_movi_tl(t1
, v1
);
13771 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13774 tcg_gen_movi_tl(t0
, v2
);
13775 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13778 tcg_gen_movi_tl(t0
, v2
);
13779 tcg_gen_movi_tl(t1
, v1
);
13780 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13783 tcg_gen_movi_tl(t0
, v2
);
13784 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13787 tcg_gen_movi_tl(t0
, v2
);
13788 tcg_gen_movi_tl(t1
, v1
);
13789 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13791 case OPC_DEXTR_R_L
:
13792 tcg_gen_movi_tl(t0
, v2
);
13793 tcg_gen_movi_tl(t1
, v1
);
13794 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13796 case OPC_DEXTR_RS_L
:
13797 tcg_gen_movi_tl(t0
, v2
);
13798 tcg_gen_movi_tl(t1
, v1
);
13799 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13802 tcg_gen_movi_tl(t0
, v2
);
13803 tcg_gen_movi_tl(t1
, v1
);
13804 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13806 case OPC_DEXTR_R_W
:
13807 tcg_gen_movi_tl(t0
, v2
);
13808 tcg_gen_movi_tl(t1
, v1
);
13809 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13811 case OPC_DEXTR_RS_W
:
13812 tcg_gen_movi_tl(t0
, v2
);
13813 tcg_gen_movi_tl(t1
, v1
);
13814 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13816 case OPC_DEXTR_S_H
:
13817 tcg_gen_movi_tl(t0
, v2
);
13818 tcg_gen_movi_tl(t1
, v1
);
13819 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13821 case OPC_DEXTRV_S_H
:
13822 tcg_gen_movi_tl(t0
, v2
);
13823 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13826 tcg_gen_movi_tl(t0
, v2
);
13827 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13829 case OPC_DEXTRV_R_L
:
13830 tcg_gen_movi_tl(t0
, v2
);
13831 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13833 case OPC_DEXTRV_RS_L
:
13834 tcg_gen_movi_tl(t0
, v2
);
13835 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13838 tcg_gen_movi_tl(t0
, v2
);
13839 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13841 case OPC_DEXTRV_R_W
:
13842 tcg_gen_movi_tl(t0
, v2
);
13843 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13845 case OPC_DEXTRV_RS_W
:
13846 tcg_gen_movi_tl(t0
, v2
);
13847 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13856 tcg_temp_free(v1_t
);
13859 /* End MIPSDSP functions. */
13861 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
13863 int rs
, rt
, rd
, sa
;
13866 rs
= (ctx
->opcode
>> 21) & 0x1f;
13867 rt
= (ctx
->opcode
>> 16) & 0x1f;
13868 rd
= (ctx
->opcode
>> 11) & 0x1f;
13869 sa
= (ctx
->opcode
>> 6) & 0x1f;
13871 op1
= MASK_SPECIAL(ctx
->opcode
);
13877 op2
= MASK_R6_MULDIV(ctx
->opcode
);
13887 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
13890 MIPS_INVAL("special_r6 muldiv");
13891 gen_reserved_instruction(ctx
);
13897 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
13901 if (rt
== 0 && sa
== 1) {
13903 * Major opcode and function field is shared with preR6 MFHI/MTHI.
13904 * We need additionally to check other fields.
13906 gen_cl(ctx
, op1
, rd
, rs
);
13908 gen_reserved_instruction(ctx
);
13912 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
13913 gen_helper_do_semihosting(cpu_env
);
13915 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13916 gen_reserved_instruction(ctx
);
13918 generate_exception_end(ctx
, EXCP_DBp
);
13922 #if defined(TARGET_MIPS64)
13925 if (rt
== 0 && sa
== 1) {
13927 * Major opcode and function field is shared with preR6 MFHI/MTHI.
13928 * We need additionally to check other fields.
13930 check_mips_64(ctx
);
13931 gen_cl(ctx
, op1
, rd
, rs
);
13933 gen_reserved_instruction(ctx
);
13941 op2
= MASK_R6_MULDIV(ctx
->opcode
);
13951 check_mips_64(ctx
);
13952 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
13955 MIPS_INVAL("special_r6 muldiv");
13956 gen_reserved_instruction(ctx
);
13961 default: /* Invalid */
13962 MIPS_INVAL("special_r6");
13963 gen_reserved_instruction(ctx
);
13968 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
13970 int rs
= extract32(ctx
->opcode
, 21, 5);
13971 int rt
= extract32(ctx
->opcode
, 16, 5);
13972 int rd
= extract32(ctx
->opcode
, 11, 5);
13973 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
13976 case OPC_MOVN
: /* Conditional move */
13978 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
13980 case OPC_MFHI
: /* Move from HI/LO */
13982 gen_HILO(ctx
, op1
, 0, rd
);
13985 case OPC_MTLO
: /* Move to HI/LO */
13986 gen_HILO(ctx
, op1
, 0, rs
);
13990 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
13994 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
13996 #if defined(TARGET_MIPS64)
14001 check_insn_opc_user_only(ctx
, INSN_R5900
);
14002 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14006 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
14008 default: /* Invalid */
14009 MIPS_INVAL("special_tx79");
14010 gen_reserved_instruction(ctx
);
14015 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
14020 rs
= (ctx
->opcode
>> 21) & 0x1f;
14021 rt
= (ctx
->opcode
>> 16) & 0x1f;
14022 rd
= (ctx
->opcode
>> 11) & 0x1f;
14024 op1
= MASK_SPECIAL(ctx
->opcode
);
14026 case OPC_MOVN
: /* Conditional move */
14028 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
|
14029 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
14030 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
14032 case OPC_MFHI
: /* Move from HI/LO */
14034 gen_HILO(ctx
, op1
, rs
& 3, rd
);
14037 case OPC_MTLO
: /* Move to HI/LO */
14038 gen_HILO(ctx
, op1
, rd
& 3, rs
);
14041 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
14042 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14043 check_cp1_enabled(ctx
);
14044 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
14045 (ctx
->opcode
>> 16) & 1);
14047 generate_exception_err(ctx
, EXCP_CpU
, 1);
14052 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
14056 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14058 #if defined(TARGET_MIPS64)
14063 check_insn(ctx
, ISA_MIPS3
);
14064 check_mips_64(ctx
);
14065 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14069 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
14072 #ifdef MIPS_STRICT_STANDARD
14073 MIPS_INVAL("SPIM");
14074 gen_reserved_instruction(ctx
);
14076 /* Implemented as RI exception for now. */
14077 MIPS_INVAL("spim (unofficial)");
14078 gen_reserved_instruction(ctx
);
14081 default: /* Invalid */
14082 MIPS_INVAL("special_legacy");
14083 gen_reserved_instruction(ctx
);
14088 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
14090 int rs
, rt
, rd
, sa
;
14093 rs
= (ctx
->opcode
>> 21) & 0x1f;
14094 rt
= (ctx
->opcode
>> 16) & 0x1f;
14095 rd
= (ctx
->opcode
>> 11) & 0x1f;
14096 sa
= (ctx
->opcode
>> 6) & 0x1f;
14098 op1
= MASK_SPECIAL(ctx
->opcode
);
14100 case OPC_SLL
: /* Shift with immediate */
14101 if (sa
== 5 && rd
== 0 &&
14102 rs
== 0 && rt
== 0) { /* PAUSE */
14103 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
14104 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
14105 gen_reserved_instruction(ctx
);
14111 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14114 switch ((ctx
->opcode
>> 21) & 0x1f) {
14116 /* rotr is decoded as srl on non-R2 CPUs */
14117 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14122 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14125 gen_reserved_instruction(ctx
);
14133 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14135 case OPC_SLLV
: /* Shifts */
14137 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14140 switch ((ctx
->opcode
>> 6) & 0x1f) {
14142 /* rotrv is decoded as srlv on non-R2 CPUs */
14143 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14148 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14151 gen_reserved_instruction(ctx
);
14155 case OPC_SLT
: /* Set on less than */
14157 gen_slt(ctx
, op1
, rd
, rs
, rt
);
14159 case OPC_AND
: /* Logic*/
14163 gen_logic(ctx
, op1
, rd
, rs
, rt
);
14166 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
14168 case OPC_TGE
: /* Traps */
14174 check_insn(ctx
, ISA_MIPS2
);
14175 gen_trap(ctx
, op1
, rs
, rt
, -1, extract32(ctx
->opcode
, 6, 10));
14178 /* Pmon entry point, also R4010 selsl */
14179 #ifdef MIPS_STRICT_STANDARD
14180 MIPS_INVAL("PMON / selsl");
14181 gen_reserved_instruction(ctx
);
14183 gen_helper_pmon(cpu_env
, tcg_constant_i32(sa
));
14187 generate_exception_end(ctx
, EXCP_SYSCALL
);
14190 generate_exception_break(ctx
, extract32(ctx
->opcode
, 6, 20));
14193 check_insn(ctx
, ISA_MIPS2
);
14194 gen_sync(extract32(ctx
->opcode
, 6, 5));
14197 #if defined(TARGET_MIPS64)
14198 /* MIPS64 specific opcodes */
14203 check_insn(ctx
, ISA_MIPS3
);
14204 check_mips_64(ctx
);
14205 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14208 switch ((ctx
->opcode
>> 21) & 0x1f) {
14210 /* drotr is decoded as dsrl on non-R2 CPUs */
14211 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14216 check_insn(ctx
, ISA_MIPS3
);
14217 check_mips_64(ctx
);
14218 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14221 gen_reserved_instruction(ctx
);
14226 switch ((ctx
->opcode
>> 21) & 0x1f) {
14228 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
14229 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14234 check_insn(ctx
, ISA_MIPS3
);
14235 check_mips_64(ctx
);
14236 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14239 gen_reserved_instruction(ctx
);
14247 check_insn(ctx
, ISA_MIPS3
);
14248 check_mips_64(ctx
);
14249 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14253 check_insn(ctx
, ISA_MIPS3
);
14254 check_mips_64(ctx
);
14255 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14258 switch ((ctx
->opcode
>> 6) & 0x1f) {
14260 /* drotrv is decoded as dsrlv on non-R2 CPUs */
14261 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14266 check_insn(ctx
, ISA_MIPS3
);
14267 check_mips_64(ctx
);
14268 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14271 gen_reserved_instruction(ctx
);
14277 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
14278 decode_opc_special_r6(env
, ctx
);
14279 } else if (ctx
->insn_flags
& INSN_R5900
) {
14280 decode_opc_special_tx79(env
, ctx
);
14282 decode_opc_special_legacy(env
, ctx
);
14288 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
14293 rs
= (ctx
->opcode
>> 21) & 0x1f;
14294 rt
= (ctx
->opcode
>> 16) & 0x1f;
14295 rd
= (ctx
->opcode
>> 11) & 0x1f;
14297 op1
= MASK_SPECIAL2(ctx
->opcode
);
14299 case OPC_MADD
: /* Multiply and add/sub */
14303 check_insn(ctx
, ISA_MIPS_R1
);
14304 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
14307 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14310 case OPC_DIVU_G_2F
:
14311 case OPC_MULT_G_2F
:
14312 case OPC_MULTU_G_2F
:
14314 case OPC_MODU_G_2F
:
14315 check_insn(ctx
, INSN_LOONGSON2F
| ASE_LEXT
);
14316 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14320 check_insn(ctx
, ISA_MIPS_R1
);
14321 gen_cl(ctx
, op1
, rd
, rs
);
14324 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
14325 gen_helper_do_semihosting(cpu_env
);
14328 * XXX: not clear which exception should be raised
14329 * when in debug mode...
14331 check_insn(ctx
, ISA_MIPS_R1
);
14332 generate_exception_end(ctx
, EXCP_DBp
);
14335 #if defined(TARGET_MIPS64)
14338 check_insn(ctx
, ISA_MIPS_R1
);
14339 check_mips_64(ctx
);
14340 gen_cl(ctx
, op1
, rd
, rs
);
14342 case OPC_DMULT_G_2F
:
14343 case OPC_DMULTU_G_2F
:
14344 case OPC_DDIV_G_2F
:
14345 case OPC_DDIVU_G_2F
:
14346 case OPC_DMOD_G_2F
:
14347 case OPC_DMODU_G_2F
:
14348 check_insn(ctx
, INSN_LOONGSON2F
| ASE_LEXT
);
14349 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14352 default: /* Invalid */
14353 MIPS_INVAL("special2_legacy");
14354 gen_reserved_instruction(ctx
);
14359 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
14361 int rs
, rt
, rd
, sa
;
14365 rs
= (ctx
->opcode
>> 21) & 0x1f;
14366 rt
= (ctx
->opcode
>> 16) & 0x1f;
14367 rd
= (ctx
->opcode
>> 11) & 0x1f;
14368 sa
= (ctx
->opcode
>> 6) & 0x1f;
14369 imm
= (int16_t)ctx
->opcode
>> 7;
14371 op1
= MASK_SPECIAL3(ctx
->opcode
);
14375 /* hint codes 24-31 are reserved and signal RI */
14376 gen_reserved_instruction(ctx
);
14378 /* Treat as NOP. */
14381 check_cp0_enabled(ctx
);
14382 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14383 gen_cache_operation(ctx
, rt
, rs
, imm
);
14387 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
14390 gen_ld(ctx
, op1
, rt
, rs
, imm
);
14395 /* Treat as NOP. */
14398 op2
= MASK_BSHFL(ctx
->opcode
);
14404 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
14407 gen_bitswap(ctx
, op2
, rd
, rt
);
14412 #ifndef CONFIG_USER_ONLY
14414 if (unlikely(ctx
->gi
<= 1)) {
14415 gen_reserved_instruction(ctx
);
14417 check_cp0_enabled(ctx
);
14418 switch ((ctx
->opcode
>> 6) & 3) {
14419 case 0: /* GINVI */
14420 /* Treat as NOP. */
14422 case 2: /* GINVT */
14423 gen_helper_0e1i(ginvt
, cpu_gpr
[rs
], extract32(ctx
->opcode
, 8, 2));
14426 gen_reserved_instruction(ctx
);
14431 #if defined(TARGET_MIPS64)
14433 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEUQ
, false);
14436 gen_ld(ctx
, op1
, rt
, rs
, imm
);
14439 check_mips_64(ctx
);
14442 /* Treat as NOP. */
14445 op2
= MASK_DBSHFL(ctx
->opcode
);
14455 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
14458 gen_bitswap(ctx
, op2
, rd
, rt
);
14465 default: /* Invalid */
14466 MIPS_INVAL("special3_r6");
14467 gen_reserved_instruction(ctx
);
14472 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
14477 rs
= (ctx
->opcode
>> 21) & 0x1f;
14478 rt
= (ctx
->opcode
>> 16) & 0x1f;
14479 rd
= (ctx
->opcode
>> 11) & 0x1f;
14481 op1
= MASK_SPECIAL3(ctx
->opcode
);
14484 case OPC_DIVU_G_2E
:
14486 case OPC_MODU_G_2E
:
14487 case OPC_MULT_G_2E
:
14488 case OPC_MULTU_G_2E
:
14490 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
14491 * the same mask and op1.
14493 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
14494 op2
= MASK_ADDUH_QB(ctx
->opcode
);
14497 case OPC_ADDUH_R_QB
:
14499 case OPC_ADDQH_R_PH
:
14501 case OPC_ADDQH_R_W
:
14503 case OPC_SUBUH_R_QB
:
14505 case OPC_SUBQH_R_PH
:
14507 case OPC_SUBQH_R_W
:
14508 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14513 case OPC_MULQ_RS_W
:
14514 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14517 MIPS_INVAL("MASK ADDUH.QB");
14518 gen_reserved_instruction(ctx
);
14521 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
14522 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14524 gen_reserved_instruction(ctx
);
14528 op2
= MASK_LX(ctx
->opcode
);
14530 #if defined(TARGET_MIPS64)
14536 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
14538 default: /* Invalid */
14539 MIPS_INVAL("MASK LX");
14540 gen_reserved_instruction(ctx
);
14544 case OPC_ABSQ_S_PH_DSP
:
14545 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
14547 case OPC_ABSQ_S_QB
:
14548 case OPC_ABSQ_S_PH
:
14550 case OPC_PRECEQ_W_PHL
:
14551 case OPC_PRECEQ_W_PHR
:
14552 case OPC_PRECEQU_PH_QBL
:
14553 case OPC_PRECEQU_PH_QBR
:
14554 case OPC_PRECEQU_PH_QBLA
:
14555 case OPC_PRECEQU_PH_QBRA
:
14556 case OPC_PRECEU_PH_QBL
:
14557 case OPC_PRECEU_PH_QBR
:
14558 case OPC_PRECEU_PH_QBLA
:
14559 case OPC_PRECEU_PH_QBRA
:
14560 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14567 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
14570 MIPS_INVAL("MASK ABSQ_S.PH");
14571 gen_reserved_instruction(ctx
);
14575 case OPC_ADDU_QB_DSP
:
14576 op2
= MASK_ADDU_QB(ctx
->opcode
);
14579 case OPC_ADDQ_S_PH
:
14582 case OPC_ADDU_S_QB
:
14584 case OPC_ADDU_S_PH
:
14586 case OPC_SUBQ_S_PH
:
14589 case OPC_SUBU_S_QB
:
14591 case OPC_SUBU_S_PH
:
14595 case OPC_RADDU_W_QB
:
14596 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14598 case OPC_MULEU_S_PH_QBL
:
14599 case OPC_MULEU_S_PH_QBR
:
14600 case OPC_MULQ_RS_PH
:
14601 case OPC_MULEQ_S_W_PHL
:
14602 case OPC_MULEQ_S_W_PHR
:
14603 case OPC_MULQ_S_PH
:
14604 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14606 default: /* Invalid */
14607 MIPS_INVAL("MASK ADDU.QB");
14608 gen_reserved_instruction(ctx
);
14613 case OPC_CMPU_EQ_QB_DSP
:
14614 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
14616 case OPC_PRECR_SRA_PH_W
:
14617 case OPC_PRECR_SRA_R_PH_W
:
14618 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
14620 case OPC_PRECR_QB_PH
:
14621 case OPC_PRECRQ_QB_PH
:
14622 case OPC_PRECRQ_PH_W
:
14623 case OPC_PRECRQ_RS_PH_W
:
14624 case OPC_PRECRQU_S_QB_PH
:
14625 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14627 case OPC_CMPU_EQ_QB
:
14628 case OPC_CMPU_LT_QB
:
14629 case OPC_CMPU_LE_QB
:
14630 case OPC_CMP_EQ_PH
:
14631 case OPC_CMP_LT_PH
:
14632 case OPC_CMP_LE_PH
:
14633 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14635 case OPC_CMPGU_EQ_QB
:
14636 case OPC_CMPGU_LT_QB
:
14637 case OPC_CMPGU_LE_QB
:
14638 case OPC_CMPGDU_EQ_QB
:
14639 case OPC_CMPGDU_LT_QB
:
14640 case OPC_CMPGDU_LE_QB
:
14643 case OPC_PACKRL_PH
:
14644 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14646 default: /* Invalid */
14647 MIPS_INVAL("MASK CMPU.EQ.QB");
14648 gen_reserved_instruction(ctx
);
14652 case OPC_SHLL_QB_DSP
:
14653 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
14655 case OPC_DPA_W_PH_DSP
:
14656 op2
= MASK_DPA_W_PH(ctx
->opcode
);
14658 case OPC_DPAU_H_QBL
:
14659 case OPC_DPAU_H_QBR
:
14660 case OPC_DPSU_H_QBL
:
14661 case OPC_DPSU_H_QBR
:
14663 case OPC_DPAX_W_PH
:
14664 case OPC_DPAQ_S_W_PH
:
14665 case OPC_DPAQX_S_W_PH
:
14666 case OPC_DPAQX_SA_W_PH
:
14668 case OPC_DPSX_W_PH
:
14669 case OPC_DPSQ_S_W_PH
:
14670 case OPC_DPSQX_S_W_PH
:
14671 case OPC_DPSQX_SA_W_PH
:
14672 case OPC_MULSAQ_S_W_PH
:
14673 case OPC_DPAQ_SA_L_W
:
14674 case OPC_DPSQ_SA_L_W
:
14675 case OPC_MAQ_S_W_PHL
:
14676 case OPC_MAQ_S_W_PHR
:
14677 case OPC_MAQ_SA_W_PHL
:
14678 case OPC_MAQ_SA_W_PHR
:
14679 case OPC_MULSA_W_PH
:
14680 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14682 default: /* Invalid */
14683 MIPS_INVAL("MASK DPAW.PH");
14684 gen_reserved_instruction(ctx
);
14689 op2
= MASK_INSV(ctx
->opcode
);
14700 t0
= tcg_temp_new();
14701 t1
= tcg_temp_new();
14703 gen_load_gpr(t0
, rt
);
14704 gen_load_gpr(t1
, rs
);
14706 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
14712 default: /* Invalid */
14713 MIPS_INVAL("MASK INSV");
14714 gen_reserved_instruction(ctx
);
14718 case OPC_APPEND_DSP
:
14719 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
14721 case OPC_EXTR_W_DSP
:
14722 op2
= MASK_EXTR_W(ctx
->opcode
);
14726 case OPC_EXTR_RS_W
:
14728 case OPC_EXTRV_S_H
:
14730 case OPC_EXTRV_R_W
:
14731 case OPC_EXTRV_RS_W
:
14736 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
14739 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14745 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14747 default: /* Invalid */
14748 MIPS_INVAL("MASK EXTR.W");
14749 gen_reserved_instruction(ctx
);
14753 #if defined(TARGET_MIPS64)
14754 case OPC_DDIV_G_2E
:
14755 case OPC_DDIVU_G_2E
:
14756 case OPC_DMULT_G_2E
:
14757 case OPC_DMULTU_G_2E
:
14758 case OPC_DMOD_G_2E
:
14759 case OPC_DMODU_G_2E
:
14760 check_insn(ctx
, INSN_LOONGSON2E
);
14761 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14763 case OPC_ABSQ_S_QH_DSP
:
14764 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
14766 case OPC_PRECEQ_L_PWL
:
14767 case OPC_PRECEQ_L_PWR
:
14768 case OPC_PRECEQ_PW_QHL
:
14769 case OPC_PRECEQ_PW_QHR
:
14770 case OPC_PRECEQ_PW_QHLA
:
14771 case OPC_PRECEQ_PW_QHRA
:
14772 case OPC_PRECEQU_QH_OBL
:
14773 case OPC_PRECEQU_QH_OBR
:
14774 case OPC_PRECEQU_QH_OBLA
:
14775 case OPC_PRECEQU_QH_OBRA
:
14776 case OPC_PRECEU_QH_OBL
:
14777 case OPC_PRECEU_QH_OBR
:
14778 case OPC_PRECEU_QH_OBLA
:
14779 case OPC_PRECEU_QH_OBRA
:
14780 case OPC_ABSQ_S_OB
:
14781 case OPC_ABSQ_S_PW
:
14782 case OPC_ABSQ_S_QH
:
14783 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14791 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
14793 default: /* Invalid */
14794 MIPS_INVAL("MASK ABSQ_S.QH");
14795 gen_reserved_instruction(ctx
);
14799 case OPC_ADDU_OB_DSP
:
14800 op2
= MASK_ADDU_OB(ctx
->opcode
);
14802 case OPC_RADDU_L_OB
:
14804 case OPC_SUBQ_S_PW
:
14806 case OPC_SUBQ_S_QH
:
14808 case OPC_SUBU_S_OB
:
14810 case OPC_SUBU_S_QH
:
14812 case OPC_SUBUH_R_OB
:
14814 case OPC_ADDQ_S_PW
:
14816 case OPC_ADDQ_S_QH
:
14818 case OPC_ADDU_S_OB
:
14820 case OPC_ADDU_S_QH
:
14822 case OPC_ADDUH_R_OB
:
14823 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14825 case OPC_MULEQ_S_PW_QHL
:
14826 case OPC_MULEQ_S_PW_QHR
:
14827 case OPC_MULEU_S_QH_OBL
:
14828 case OPC_MULEU_S_QH_OBR
:
14829 case OPC_MULQ_RS_QH
:
14830 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14832 default: /* Invalid */
14833 MIPS_INVAL("MASK ADDU.OB");
14834 gen_reserved_instruction(ctx
);
14838 case OPC_CMPU_EQ_OB_DSP
:
14839 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
14841 case OPC_PRECR_SRA_QH_PW
:
14842 case OPC_PRECR_SRA_R_QH_PW
:
14843 /* Return value is rt. */
14844 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
14846 case OPC_PRECR_OB_QH
:
14847 case OPC_PRECRQ_OB_QH
:
14848 case OPC_PRECRQ_PW_L
:
14849 case OPC_PRECRQ_QH_PW
:
14850 case OPC_PRECRQ_RS_QH_PW
:
14851 case OPC_PRECRQU_S_OB_QH
:
14852 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14854 case OPC_CMPU_EQ_OB
:
14855 case OPC_CMPU_LT_OB
:
14856 case OPC_CMPU_LE_OB
:
14857 case OPC_CMP_EQ_QH
:
14858 case OPC_CMP_LT_QH
:
14859 case OPC_CMP_LE_QH
:
14860 case OPC_CMP_EQ_PW
:
14861 case OPC_CMP_LT_PW
:
14862 case OPC_CMP_LE_PW
:
14863 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14865 case OPC_CMPGDU_EQ_OB
:
14866 case OPC_CMPGDU_LT_OB
:
14867 case OPC_CMPGDU_LE_OB
:
14868 case OPC_CMPGU_EQ_OB
:
14869 case OPC_CMPGU_LT_OB
:
14870 case OPC_CMPGU_LE_OB
:
14871 case OPC_PACKRL_PW
:
14875 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14877 default: /* Invalid */
14878 MIPS_INVAL("MASK CMPU_EQ.OB");
14879 gen_reserved_instruction(ctx
);
14883 case OPC_DAPPEND_DSP
:
14884 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
14886 case OPC_DEXTR_W_DSP
:
14887 op2
= MASK_DEXTR_W(ctx
->opcode
);
14894 case OPC_DEXTR_R_L
:
14895 case OPC_DEXTR_RS_L
:
14897 case OPC_DEXTR_R_W
:
14898 case OPC_DEXTR_RS_W
:
14899 case OPC_DEXTR_S_H
:
14901 case OPC_DEXTRV_R_L
:
14902 case OPC_DEXTRV_RS_L
:
14903 case OPC_DEXTRV_S_H
:
14905 case OPC_DEXTRV_R_W
:
14906 case OPC_DEXTRV_RS_W
:
14907 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
14912 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14914 default: /* Invalid */
14915 MIPS_INVAL("MASK EXTR.W");
14916 gen_reserved_instruction(ctx
);
14920 case OPC_DPAQ_W_QH_DSP
:
14921 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
14923 case OPC_DPAU_H_OBL
:
14924 case OPC_DPAU_H_OBR
:
14925 case OPC_DPSU_H_OBL
:
14926 case OPC_DPSU_H_OBR
:
14928 case OPC_DPAQ_S_W_QH
:
14930 case OPC_DPSQ_S_W_QH
:
14931 case OPC_MULSAQ_S_W_QH
:
14932 case OPC_DPAQ_SA_L_PW
:
14933 case OPC_DPSQ_SA_L_PW
:
14934 case OPC_MULSAQ_S_L_PW
:
14935 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14937 case OPC_MAQ_S_W_QHLL
:
14938 case OPC_MAQ_S_W_QHLR
:
14939 case OPC_MAQ_S_W_QHRL
:
14940 case OPC_MAQ_S_W_QHRR
:
14941 case OPC_MAQ_SA_W_QHLL
:
14942 case OPC_MAQ_SA_W_QHLR
:
14943 case OPC_MAQ_SA_W_QHRL
:
14944 case OPC_MAQ_SA_W_QHRR
:
14945 case OPC_MAQ_S_L_PWL
:
14946 case OPC_MAQ_S_L_PWR
:
14951 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14953 default: /* Invalid */
14954 MIPS_INVAL("MASK DPAQ.W.QH");
14955 gen_reserved_instruction(ctx
);
14959 case OPC_DINSV_DSP
:
14960 op2
= MASK_INSV(ctx
->opcode
);
14972 t0
= tcg_temp_new();
14973 t1
= tcg_temp_new();
14975 gen_load_gpr(t0
, rt
);
14976 gen_load_gpr(t1
, rs
);
14978 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
14984 default: /* Invalid */
14985 MIPS_INVAL("MASK DINSV");
14986 gen_reserved_instruction(ctx
);
14990 case OPC_SHLL_OB_DSP
:
14991 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
14994 default: /* Invalid */
14995 MIPS_INVAL("special3_legacy");
14996 gen_reserved_instruction(ctx
);
15002 #if defined(TARGET_MIPS64)
15004 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
15006 uint32_t opc
= MASK_MMI(ctx
->opcode
);
15007 int rs
= extract32(ctx
->opcode
, 21, 5);
15008 int rt
= extract32(ctx
->opcode
, 16, 5);
15009 int rd
= extract32(ctx
->opcode
, 11, 5);
15012 case MMI_OPC_MULT1
:
15013 case MMI_OPC_MULTU1
:
15015 case MMI_OPC_MADDU
:
15016 case MMI_OPC_MADD1
:
15017 case MMI_OPC_MADDU1
:
15018 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
15021 case MMI_OPC_DIVU1
:
15022 gen_div1_tx79(ctx
, opc
, rs
, rt
);
15025 MIPS_INVAL("TX79 MMI class");
15026 gen_reserved_instruction(ctx
);
15031 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
15033 gen_reserved_instruction(ctx
); /* TODO: MMI_OPC_SQ */
15037 * The TX79-specific instruction Store Quadword
15039 * +--------+-------+-------+------------------------+
15040 * | 011111 | base | rt | offset | SQ
15041 * +--------+-------+-------+------------------------+
15044 * has the same opcode as the Read Hardware Register instruction
15046 * +--------+-------+-------+-------+-------+--------+
15047 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
15048 * +--------+-------+-------+-------+-------+--------+
15051 * that is required, trapped and emulated by the Linux kernel. However, all
15052 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
15053 * offset is odd. Therefore all valid SQ instructions can execute normally.
15054 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
15055 * between SQ and RDHWR, as the Linux kernel does.
15057 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
15059 int base
= extract32(ctx
->opcode
, 21, 5);
15060 int rt
= extract32(ctx
->opcode
, 16, 5);
15061 int offset
= extract32(ctx
->opcode
, 0, 16);
15063 #ifdef CONFIG_USER_ONLY
15064 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
15065 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
15067 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
15068 int rd
= extract32(ctx
->opcode
, 11, 5);
15070 gen_rdhwr(ctx
, rt
, rd
, 0);
15075 gen_mmi_sq(ctx
, base
, rt
, offset
);
15080 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
15082 int rs
, rt
, rd
, sa
;
15086 rs
= (ctx
->opcode
>> 21) & 0x1f;
15087 rt
= (ctx
->opcode
>> 16) & 0x1f;
15088 rd
= (ctx
->opcode
>> 11) & 0x1f;
15089 sa
= (ctx
->opcode
>> 6) & 0x1f;
15090 imm
= sextract32(ctx
->opcode
, 7, 9);
15092 op1
= MASK_SPECIAL3(ctx
->opcode
);
15095 * EVA loads and stores overlap Loongson 2E instructions decoded by
15096 * decode_opc_special3_legacy(), so be careful to allow their decoding when
15109 check_cp0_enabled(ctx
);
15110 gen_ld(ctx
, op1
, rt
, rs
, imm
);
15117 check_cp0_enabled(ctx
);
15118 gen_st(ctx
, op1
, rt
, rs
, imm
);
15121 check_cp0_enabled(ctx
);
15122 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, true);
15126 check_cp0_enabled(ctx
);
15127 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15128 gen_cache_operation(ctx
, rt
, rs
, imm
);
15132 check_cp0_enabled(ctx
);
15133 /* Treat as NOP. */
15141 check_insn(ctx
, ISA_MIPS_R2
);
15142 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
15145 op2
= MASK_BSHFL(ctx
->opcode
);
15152 check_insn(ctx
, ISA_MIPS_R6
);
15153 decode_opc_special3_r6(env
, ctx
);
15156 check_insn(ctx
, ISA_MIPS_R2
);
15157 gen_bshfl(ctx
, op2
, rt
, rd
);
15161 #if defined(TARGET_MIPS64)
15168 check_insn(ctx
, ISA_MIPS_R2
);
15169 check_mips_64(ctx
);
15170 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
15173 op2
= MASK_DBSHFL(ctx
->opcode
);
15184 check_insn(ctx
, ISA_MIPS_R6
);
15185 decode_opc_special3_r6(env
, ctx
);
15188 check_insn(ctx
, ISA_MIPS_R2
);
15189 check_mips_64(ctx
);
15190 op2
= MASK_DBSHFL(ctx
->opcode
);
15191 gen_bshfl(ctx
, op2
, rt
, rd
);
15197 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
15202 TCGv t0
= tcg_temp_new();
15203 TCGv t1
= tcg_temp_new();
15205 gen_load_gpr(t0
, rt
);
15206 gen_load_gpr(t1
, rs
);
15207 gen_helper_fork(t0
, t1
);
15215 TCGv t0
= tcg_temp_new();
15217 gen_load_gpr(t0
, rs
);
15218 gen_helper_yield(t0
, cpu_env
, t0
);
15219 gen_store_gpr(t0
, rd
);
15224 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15225 decode_opc_special3_r6(env
, ctx
);
15227 decode_opc_special3_legacy(env
, ctx
);
15232 static bool decode_opc_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
15235 int rs
, rt
, rd
, sa
;
15239 op
= MASK_OP_MAJOR(ctx
->opcode
);
15240 rs
= (ctx
->opcode
>> 21) & 0x1f;
15241 rt
= (ctx
->opcode
>> 16) & 0x1f;
15242 rd
= (ctx
->opcode
>> 11) & 0x1f;
15243 sa
= (ctx
->opcode
>> 6) & 0x1f;
15244 imm
= (int16_t)ctx
->opcode
;
15247 decode_opc_special(env
, ctx
);
15250 #if defined(TARGET_MIPS64)
15251 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
15252 decode_mmi(env
, ctx
);
15256 if (TARGET_LONG_BITS
== 32 && (ctx
->insn_flags
& ASE_MXU
)) {
15257 if (MASK_SPECIAL2(ctx
->opcode
) == OPC_MUL
) {
15258 gen_arith(ctx
, OPC_MUL
, rd
, rs
, rt
);
15260 decode_ase_mxu(ctx
, ctx
->opcode
);
15264 decode_opc_special2_legacy(env
, ctx
);
15267 #if defined(TARGET_MIPS64)
15268 if (ctx
->insn_flags
& INSN_R5900
) {
15269 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
15271 decode_opc_special3(env
, ctx
);
15274 decode_opc_special3(env
, ctx
);
15278 op1
= MASK_REGIMM(ctx
->opcode
);
15280 case OPC_BLTZL
: /* REGIMM branches */
15284 check_insn(ctx
, ISA_MIPS2
);
15285 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15289 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
15293 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15295 /* OPC_NAL, OPC_BAL */
15296 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
15298 gen_reserved_instruction(ctx
);
15301 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
15304 case OPC_TGEI
: /* REGIMM traps */
15310 check_insn(ctx
, ISA_MIPS2
);
15311 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15312 gen_trap(ctx
, op1
, rs
, -1, imm
, 0);
15315 check_insn(ctx
, ISA_MIPS_R6
);
15316 gen_reserved_instruction(ctx
);
15319 check_insn(ctx
, ISA_MIPS_R2
);
15321 * Break the TB to be able to sync copied instructions
15324 ctx
->base
.is_jmp
= DISAS_STOP
;
15326 case OPC_BPOSGE32
: /* MIPS DSP branch */
15327 #if defined(TARGET_MIPS64)
15331 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
15333 #if defined(TARGET_MIPS64)
15335 check_insn(ctx
, ISA_MIPS_R6
);
15336 check_mips_64(ctx
);
15338 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
15342 check_insn(ctx
, ISA_MIPS_R6
);
15343 check_mips_64(ctx
);
15345 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
15349 default: /* Invalid */
15350 MIPS_INVAL("regimm");
15351 gen_reserved_instruction(ctx
);
15356 check_cp0_enabled(ctx
);
15357 op1
= MASK_CP0(ctx
->opcode
);
15365 #if defined(TARGET_MIPS64)
15369 #ifndef CONFIG_USER_ONLY
15370 gen_cp0(env
, ctx
, op1
, rt
, rd
);
15371 #endif /* !CONFIG_USER_ONLY */
15389 #ifndef CONFIG_USER_ONLY
15390 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
15391 #endif /* !CONFIG_USER_ONLY */
15394 #ifndef CONFIG_USER_ONLY
15397 TCGv t0
= tcg_temp_new();
15399 op2
= MASK_MFMC0(ctx
->opcode
);
15403 gen_helper_dmt(t0
);
15404 gen_store_gpr(t0
, rt
);
15408 gen_helper_emt(t0
);
15409 gen_store_gpr(t0
, rt
);
15413 gen_helper_dvpe(t0
, cpu_env
);
15414 gen_store_gpr(t0
, rt
);
15418 gen_helper_evpe(t0
, cpu_env
);
15419 gen_store_gpr(t0
, rt
);
15422 check_insn(ctx
, ISA_MIPS_R6
);
15424 gen_helper_dvp(t0
, cpu_env
);
15425 gen_store_gpr(t0
, rt
);
15429 check_insn(ctx
, ISA_MIPS_R6
);
15431 gen_helper_evp(t0
, cpu_env
);
15432 gen_store_gpr(t0
, rt
);
15436 check_insn(ctx
, ISA_MIPS_R2
);
15437 save_cpu_state(ctx
, 1);
15438 gen_helper_di(t0
, cpu_env
);
15439 gen_store_gpr(t0
, rt
);
15441 * Stop translation as we may have switched
15442 * the execution mode.
15444 ctx
->base
.is_jmp
= DISAS_STOP
;
15447 check_insn(ctx
, ISA_MIPS_R2
);
15448 save_cpu_state(ctx
, 1);
15449 gen_helper_ei(t0
, cpu_env
);
15450 gen_store_gpr(t0
, rt
);
15452 * DISAS_STOP isn't sufficient, we need to ensure we break
15453 * out of translated code to check for pending interrupts.
15455 gen_save_pc(ctx
->base
.pc_next
+ 4);
15456 ctx
->base
.is_jmp
= DISAS_EXIT
;
15458 default: /* Invalid */
15459 MIPS_INVAL("mfmc0");
15460 gen_reserved_instruction(ctx
);
15465 #endif /* !CONFIG_USER_ONLY */
15468 check_insn(ctx
, ISA_MIPS_R2
);
15469 gen_load_srsgpr(rt
, rd
);
15472 check_insn(ctx
, ISA_MIPS_R2
);
15473 gen_store_srsgpr(rt
, rd
);
15477 gen_reserved_instruction(ctx
);
15481 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
15482 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15483 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
15484 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15487 /* Arithmetic with immediate opcode */
15488 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15492 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15494 case OPC_SLTI
: /* Set on less than with immediate opcode */
15496 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
15498 case OPC_ANDI
: /* Arithmetic with immediate opcode */
15499 case OPC_LUI
: /* OPC_AUI */
15502 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
15504 case OPC_J
: /* Jump */
15506 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15507 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
15510 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
15511 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15513 gen_reserved_instruction(ctx
);
15516 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
15517 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15520 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15523 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
15524 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15526 gen_reserved_instruction(ctx
);
15529 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
15530 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15533 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15536 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
15539 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15541 check_insn(ctx
, ISA_MIPS_R6
);
15542 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
15543 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15546 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
15549 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15551 check_insn(ctx
, ISA_MIPS_R6
);
15552 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
15553 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15558 check_insn(ctx
, ISA_MIPS2
);
15559 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15563 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15565 case OPC_LL
: /* Load and stores */
15566 check_insn(ctx
, ISA_MIPS2
);
15567 if (ctx
->insn_flags
& INSN_R5900
) {
15568 check_insn_opc_user_only(ctx
, INSN_R5900
);
15579 gen_ld(ctx
, op
, rt
, rs
, imm
);
15586 gen_st(ctx
, op
, rt
, rs
, imm
);
15589 check_insn(ctx
, ISA_MIPS2
);
15590 if (ctx
->insn_flags
& INSN_R5900
) {
15591 check_insn_opc_user_only(ctx
, INSN_R5900
);
15593 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
15596 check_cp0_enabled(ctx
);
15597 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS_R1
);
15598 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15599 gen_cache_operation(ctx
, rt
, rs
, imm
);
15601 /* Treat as NOP. */
15604 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
| INSN_R5900
);
15605 /* Treat as NOP. */
15608 /* Floating point (COP1). */
15613 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
15617 op1
= MASK_CP1(ctx
->opcode
);
15622 check_cp1_enabled(ctx
);
15623 check_insn(ctx
, ISA_MIPS_R2
);
15629 check_cp1_enabled(ctx
);
15630 gen_cp1(ctx
, op1
, rt
, rd
);
15632 #if defined(TARGET_MIPS64)
15635 check_cp1_enabled(ctx
);
15636 check_insn(ctx
, ISA_MIPS3
);
15637 check_mips_64(ctx
);
15638 gen_cp1(ctx
, op1
, rt
, rd
);
15641 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
15642 check_cp1_enabled(ctx
);
15643 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15645 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
15650 check_insn(ctx
, ASE_MIPS3D
);
15651 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
15652 (rt
>> 2) & 0x7, imm
<< 2);
15656 check_cp1_enabled(ctx
);
15657 check_insn(ctx
, ISA_MIPS_R6
);
15658 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
15662 check_cp1_enabled(ctx
);
15663 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15665 check_insn(ctx
, ASE_MIPS3D
);
15668 check_cp1_enabled(ctx
);
15669 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15670 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
15671 (rt
>> 2) & 0x7, imm
<< 2);
15678 check_cp1_enabled(ctx
);
15679 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
15685 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
15686 check_cp1_enabled(ctx
);
15687 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15689 case R6_OPC_CMP_AF_S
:
15690 case R6_OPC_CMP_UN_S
:
15691 case R6_OPC_CMP_EQ_S
:
15692 case R6_OPC_CMP_UEQ_S
:
15693 case R6_OPC_CMP_LT_S
:
15694 case R6_OPC_CMP_ULT_S
:
15695 case R6_OPC_CMP_LE_S
:
15696 case R6_OPC_CMP_ULE_S
:
15697 case R6_OPC_CMP_SAF_S
:
15698 case R6_OPC_CMP_SUN_S
:
15699 case R6_OPC_CMP_SEQ_S
:
15700 case R6_OPC_CMP_SEUQ_S
:
15701 case R6_OPC_CMP_SLT_S
:
15702 case R6_OPC_CMP_SULT_S
:
15703 case R6_OPC_CMP_SLE_S
:
15704 case R6_OPC_CMP_SULE_S
:
15705 case R6_OPC_CMP_OR_S
:
15706 case R6_OPC_CMP_UNE_S
:
15707 case R6_OPC_CMP_NE_S
:
15708 case R6_OPC_CMP_SOR_S
:
15709 case R6_OPC_CMP_SUNE_S
:
15710 case R6_OPC_CMP_SNE_S
:
15711 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
15713 case R6_OPC_CMP_AF_D
:
15714 case R6_OPC_CMP_UN_D
:
15715 case R6_OPC_CMP_EQ_D
:
15716 case R6_OPC_CMP_UEQ_D
:
15717 case R6_OPC_CMP_LT_D
:
15718 case R6_OPC_CMP_ULT_D
:
15719 case R6_OPC_CMP_LE_D
:
15720 case R6_OPC_CMP_ULE_D
:
15721 case R6_OPC_CMP_SAF_D
:
15722 case R6_OPC_CMP_SUN_D
:
15723 case R6_OPC_CMP_SEQ_D
:
15724 case R6_OPC_CMP_SEUQ_D
:
15725 case R6_OPC_CMP_SLT_D
:
15726 case R6_OPC_CMP_SULT_D
:
15727 case R6_OPC_CMP_SLE_D
:
15728 case R6_OPC_CMP_SULE_D
:
15729 case R6_OPC_CMP_OR_D
:
15730 case R6_OPC_CMP_UNE_D
:
15731 case R6_OPC_CMP_NE_D
:
15732 case R6_OPC_CMP_SOR_D
:
15733 case R6_OPC_CMP_SUNE_D
:
15734 case R6_OPC_CMP_SNE_D
:
15735 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
15738 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
15739 rt
, rd
, sa
, (imm
>> 8) & 0x7);
15744 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
15751 gen_reserved_instruction(ctx
);
15756 /* Compact branches [R6] and COP2 [non-R6] */
15757 case OPC_BC
: /* OPC_LWC2 */
15758 case OPC_BALC
: /* OPC_SWC2 */
15759 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15760 /* OPC_BC, OPC_BALC */
15761 gen_compute_compact_branch(ctx
, op
, 0, 0,
15762 sextract32(ctx
->opcode
<< 2, 0, 28));
15763 } else if (ctx
->insn_flags
& ASE_LEXT
) {
15764 gen_loongson_lswc2(ctx
, rt
, rs
, rd
);
15766 /* OPC_LWC2, OPC_SWC2 */
15767 /* COP2: Not implemented. */
15768 generate_exception_err(ctx
, EXCP_CpU
, 2);
15771 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
15772 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
15773 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15775 /* OPC_BEQZC, OPC_BNEZC */
15776 gen_compute_compact_branch(ctx
, op
, rs
, 0,
15777 sextract32(ctx
->opcode
<< 2, 0, 23));
15779 /* OPC_JIC, OPC_JIALC */
15780 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
15782 } else if (ctx
->insn_flags
& ASE_LEXT
) {
15783 gen_loongson_lsdc2(ctx
, rt
, rs
, rd
);
15785 /* OPC_LWC2, OPC_SWC2 */
15786 /* COP2: Not implemented. */
15787 generate_exception_err(ctx
, EXCP_CpU
, 2);
15791 check_insn(ctx
, ASE_LMMI
);
15792 /* Note that these instructions use different fields. */
15793 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
15797 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15798 check_cp1_enabled(ctx
);
15799 op1
= MASK_CP3(ctx
->opcode
);
15803 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS_R2
);
15809 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15810 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
15813 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15814 /* Treat as NOP. */
15817 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS_R2
);
15831 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15832 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
15836 gen_reserved_instruction(ctx
);
15840 generate_exception_err(ctx
, EXCP_CpU
, 1);
15844 #if defined(TARGET_MIPS64)
15845 /* MIPS64 opcodes */
15847 if (ctx
->insn_flags
& INSN_R5900
) {
15848 check_insn_opc_user_only(ctx
, INSN_R5900
);
15855 check_insn(ctx
, ISA_MIPS3
);
15856 check_mips_64(ctx
);
15857 gen_ld(ctx
, op
, rt
, rs
, imm
);
15862 check_insn(ctx
, ISA_MIPS3
);
15863 check_mips_64(ctx
);
15864 gen_st(ctx
, op
, rt
, rs
, imm
);
15867 check_insn(ctx
, ISA_MIPS3
);
15868 if (ctx
->insn_flags
& INSN_R5900
) {
15869 check_insn_opc_user_only(ctx
, INSN_R5900
);
15871 check_mips_64(ctx
);
15872 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEUQ
, false);
15874 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
15875 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15876 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
15877 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15880 check_insn(ctx
, ISA_MIPS3
);
15881 check_mips_64(ctx
);
15882 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15886 check_insn(ctx
, ISA_MIPS3
);
15887 check_mips_64(ctx
);
15888 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15891 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
15892 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15893 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15895 MIPS_INVAL("major opcode");
15896 gen_reserved_instruction(ctx
);
15900 case OPC_DAUI
: /* OPC_JALX */
15901 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15902 #if defined(TARGET_MIPS64)
15904 check_mips_64(ctx
);
15906 generate_exception(ctx
, EXCP_RI
);
15907 } else if (rt
!= 0) {
15908 TCGv t0
= tcg_temp_new();
15909 gen_load_gpr(t0
, rs
);
15910 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
15914 gen_reserved_instruction(ctx
);
15915 MIPS_INVAL("major opcode");
15919 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
15920 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15921 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
15925 /* MDMX: Not implemented. */
15928 check_insn(ctx
, ISA_MIPS_R6
);
15929 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
15931 default: /* Invalid */
15932 MIPS_INVAL("major opcode");
15938 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15940 /* make sure instructions are on a word boundary */
15941 if (ctx
->base
.pc_next
& 0x3) {
15942 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
15943 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
15947 /* Handle blikely not taken case */
15948 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
15949 TCGLabel
*l1
= gen_new_label();
15951 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
15952 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
15953 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
15957 /* Transition to the auto-generated decoder. */
15959 /* Vendor specific extensions */
15960 if (cpu_supports_isa(env
, INSN_R5900
) && decode_ext_txx9(ctx
, ctx
->opcode
)) {
15963 if (cpu_supports_isa(env
, INSN_VR54XX
) && decode_ext_vr54xx(ctx
, ctx
->opcode
)) {
15967 /* ISA extensions */
15968 if (ase_msa_available(env
) && decode_ase_msa(ctx
, ctx
->opcode
)) {
15972 /* ISA (from latest to oldest) */
15973 if (cpu_supports_isa(env
, ISA_MIPS_R6
) && decode_isa_rel6(ctx
, ctx
->opcode
)) {
15977 if (decode_opc_legacy(env
, ctx
)) {
15981 gen_reserved_instruction(ctx
);
15984 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
15986 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
15987 CPUMIPSState
*env
= cs
->env_ptr
;
15989 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
15990 ctx
->saved_pc
= -1;
15991 ctx
->insn_flags
= env
->insn_flags
;
15992 ctx
->CP0_Config0
= env
->CP0_Config0
;
15993 ctx
->CP0_Config1
= env
->CP0_Config1
;
15994 ctx
->CP0_Config2
= env
->CP0_Config2
;
15995 ctx
->CP0_Config3
= env
->CP0_Config3
;
15996 ctx
->CP0_Config5
= env
->CP0_Config5
;
15998 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
15999 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
16000 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
16001 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
16002 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
16003 ctx
->PAMask
= env
->PAMask
;
16004 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
16005 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
16006 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
16007 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
16008 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
16009 /* Restore delay slot state from the tb context. */
16010 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
16011 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
16012 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
16013 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
16014 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
16015 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
16016 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
16017 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
16018 ctx
->mi
= (env
->CP0_Config5
>> CP0C5_MI
) & 1;
16019 ctx
->gi
= (env
->CP0_Config5
>> CP0C5_GI
) & 3;
16020 restore_cpu_state(env
, ctx
);
16021 #ifdef CONFIG_USER_ONLY
16022 ctx
->mem_idx
= MIPS_HFLAG_UM
;
16024 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
16026 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& (ISA_MIPS_R6
|
16027 INSN_LOONGSON3A
)) ? MO_UNALN
: MO_ALIGN
;
16030 * Execute a branch and its delay slot as a single instruction.
16031 * This is what GDB expects and is consistent with what the
16032 * hardware does (e.g. if a delay slot instruction faults, the
16033 * reported PC is the PC of the branch).
16035 if (ctx
->base
.singlestep_enabled
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16036 ctx
->base
.max_insns
= 2;
16039 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
16043 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
16047 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
16049 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16051 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
16055 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
16057 CPUMIPSState
*env
= cs
->env_ptr
;
16058 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16062 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
16063 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
16064 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16065 insn_bytes
= decode_isa_nanomips(env
, ctx
);
16066 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
16067 ctx
->opcode
= translator_ldl(env
, &ctx
->base
, ctx
->base
.pc_next
);
16069 decode_opc(env
, ctx
);
16070 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
16071 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16072 insn_bytes
= decode_isa_micromips(env
, ctx
);
16073 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
16074 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16075 insn_bytes
= decode_ase_mips16e(env
, ctx
);
16077 gen_reserved_instruction(ctx
);
16078 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
16082 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
16083 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
16084 MIPS_HFLAG_FBNSLOT
))) {
16086 * Force to generate branch as there is neither delay nor
16091 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
16092 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
16094 * Force to generate branch as microMIPS R6 doesn't restrict
16095 * branches in the forbidden slot.
16101 gen_branch(ctx
, insn_bytes
);
16103 ctx
->base
.pc_next
+= insn_bytes
;
16105 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
16110 * End the TB on (most) page crossings.
16111 * See mips_tr_init_disas_context about single-stepping a branch
16112 * together with its delay slot.
16114 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
16115 && !ctx
->base
.singlestep_enabled
) {
16116 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
16120 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
16122 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16124 switch (ctx
->base
.is_jmp
) {
16126 gen_save_pc(ctx
->base
.pc_next
);
16127 tcg_gen_lookup_and_goto_ptr();
16130 case DISAS_TOO_MANY
:
16131 save_cpu_state(ctx
, 0);
16132 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
16135 tcg_gen_exit_tb(NULL
, 0);
16137 case DISAS_NORETURN
:
16140 g_assert_not_reached();
16144 static void mips_tr_disas_log(const DisasContextBase
*dcbase
,
16145 CPUState
*cs
, FILE *logfile
)
16147 fprintf(logfile
, "IN: %s\n", lookup_symbol(dcbase
->pc_first
));
16148 target_disas(logfile
, cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
16151 static const TranslatorOps mips_tr_ops
= {
16152 .init_disas_context
= mips_tr_init_disas_context
,
16153 .tb_start
= mips_tr_tb_start
,
16154 .insn_start
= mips_tr_insn_start
,
16155 .translate_insn
= mips_tr_translate_insn
,
16156 .tb_stop
= mips_tr_tb_stop
,
16157 .disas_log
= mips_tr_disas_log
,
16160 void gen_intermediate_code(CPUState
*cs
, TranslationBlock
*tb
, int max_insns
)
16164 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
, max_insns
);
16167 void mips_tcg_init(void)
16172 for (i
= 1; i
< 32; i
++)
16173 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
16174 offsetof(CPUMIPSState
,
16177 #if defined(TARGET_MIPS64)
16178 cpu_gpr_hi
[0] = NULL
;
16180 for (unsigned i
= 1; i
< 32; i
++) {
16181 g_autofree
char *rname
= g_strdup_printf("%s[hi]", regnames
[i
]);
16183 cpu_gpr_hi
[i
] = tcg_global_mem_new_i64(cpu_env
,
16184 offsetof(CPUMIPSState
,
16185 active_tc
.gpr_hi
[i
]),
16188 #endif /* !TARGET_MIPS64 */
16189 for (i
= 0; i
< 32; i
++) {
16190 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
16192 fpu_f64
[i
] = tcg_global_mem_new_i64(cpu_env
, off
, fregnames
[i
]);
16194 msa_translate_init();
16195 cpu_PC
= tcg_global_mem_new(cpu_env
,
16196 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
16197 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
16198 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
16199 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
16201 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
16202 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
16205 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
16206 offsetof(CPUMIPSState
,
16207 active_tc
.DSPControl
),
16209 bcond
= tcg_global_mem_new(cpu_env
,
16210 offsetof(CPUMIPSState
, bcond
), "bcond");
16211 btarget
= tcg_global_mem_new(cpu_env
,
16212 offsetof(CPUMIPSState
, btarget
), "btarget");
16213 hflags
= tcg_global_mem_new_i32(cpu_env
,
16214 offsetof(CPUMIPSState
, hflags
), "hflags");
16216 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
16217 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
16219 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
16220 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
16222 cpu_lladdr
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, lladdr
),
16224 cpu_llval
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, llval
),
16227 if (TARGET_LONG_BITS
== 32) {
16228 mxu_translate_init();
16232 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
16233 target_ulong
*data
)
16235 env
->active_tc
.PC
= data
[0];
16236 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
16237 env
->hflags
|= data
[1];
16238 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
16239 case MIPS_HFLAG_BR
:
16241 case MIPS_HFLAG_BC
:
16242 case MIPS_HFLAG_BL
:
16244 env
->btarget
= data
[2];