2 * MIPS emulation for QEMU - main translation routines
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
9 * Copyright (c) 2020 Philippe Mathieu-Daudé
11 * This library is free software; you can redistribute it and/or
12 * modify it under the terms of the GNU Lesser General Public
13 * License as published by the Free Software Foundation; either
14 * version 2.1 of the License, or (at your option) any later version.
16 * This library is distributed in the hope that it will be useful,
17 * but WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 * Lesser General Public License for more details.
21 * You should have received a copy of the GNU Lesser General Public
22 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
25 #include "qemu/osdep.h"
28 #include "tcg/tcg-op.h"
29 #include "exec/translator.h"
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 #include "semihosting/semihost.h"
35 #include "exec/translator.h"
37 #include "qemu/qemu-print.h"
38 #include "fpu_helper.h"
39 #include "translate.h"
42 * Many sysemu-only helpers are not reachable for user-only.
43 * Define stub generators here, so that we need not either sprinkle
44 * ifdefs through the translator, nor provide the helper function.
/*
 * Expand to a static inline gen_helper_<NAME>() whose body only asserts:
 * reaching one of these stubs at translate time is a bug, since the
 * corresponding helper must never be generated in this configuration.
 */
#define STUB_HELPER(NAME, ...) \
static inline void gen_helper_##NAME(__VA_ARGS__) \
{ g_assert_not_reached(); }
#ifdef CONFIG_USER_ONLY
/*
 * User-only build: the "cache" helper is sysemu-only (see comment above),
 * so provide an unreachable stub taking (env, value, register-index).
 */
STUB_HELPER(cache, TCGv_env env, TCGv val, TCGv_i32 reg)
55 /* indirect opcode tables */
56 OPC_SPECIAL
= (0x00 << 26),
57 OPC_REGIMM
= (0x01 << 26),
58 OPC_CP0
= (0x10 << 26),
59 OPC_CP2
= (0x12 << 26),
60 OPC_CP3
= (0x13 << 26),
61 OPC_SPECIAL2
= (0x1C << 26),
62 OPC_SPECIAL3
= (0x1F << 26),
63 /* arithmetic with immediate */
64 OPC_ADDI
= (0x08 << 26),
65 OPC_ADDIU
= (0x09 << 26),
66 OPC_SLTI
= (0x0A << 26),
67 OPC_SLTIU
= (0x0B << 26),
68 /* logic with immediate */
69 OPC_ANDI
= (0x0C << 26),
70 OPC_ORI
= (0x0D << 26),
71 OPC_XORI
= (0x0E << 26),
72 OPC_LUI
= (0x0F << 26),
73 /* arithmetic with immediate */
74 OPC_DADDI
= (0x18 << 26),
75 OPC_DADDIU
= (0x19 << 26),
76 /* Jump and branches */
78 OPC_JAL
= (0x03 << 26),
79 OPC_BEQ
= (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
80 OPC_BEQL
= (0x14 << 26),
81 OPC_BNE
= (0x05 << 26),
82 OPC_BNEL
= (0x15 << 26),
83 OPC_BLEZ
= (0x06 << 26),
84 OPC_BLEZL
= (0x16 << 26),
85 OPC_BGTZ
= (0x07 << 26),
86 OPC_BGTZL
= (0x17 << 26),
87 OPC_JALX
= (0x1D << 26),
88 OPC_DAUI
= (0x1D << 26),
90 OPC_LDL
= (0x1A << 26),
91 OPC_LDR
= (0x1B << 26),
92 OPC_LB
= (0x20 << 26),
93 OPC_LH
= (0x21 << 26),
94 OPC_LWL
= (0x22 << 26),
95 OPC_LW
= (0x23 << 26),
96 OPC_LWPC
= OPC_LW
| 0x5,
97 OPC_LBU
= (0x24 << 26),
98 OPC_LHU
= (0x25 << 26),
99 OPC_LWR
= (0x26 << 26),
100 OPC_LWU
= (0x27 << 26),
101 OPC_SB
= (0x28 << 26),
102 OPC_SH
= (0x29 << 26),
103 OPC_SWL
= (0x2A << 26),
104 OPC_SW
= (0x2B << 26),
105 OPC_SDL
= (0x2C << 26),
106 OPC_SDR
= (0x2D << 26),
107 OPC_SWR
= (0x2E << 26),
108 OPC_LL
= (0x30 << 26),
109 OPC_LLD
= (0x34 << 26),
110 OPC_LD
= (0x37 << 26),
111 OPC_LDPC
= OPC_LD
| 0x5,
112 OPC_SC
= (0x38 << 26),
113 OPC_SCD
= (0x3C << 26),
114 OPC_SD
= (0x3F << 26),
115 /* Floating point load/store */
116 OPC_LWC1
= (0x31 << 26),
117 OPC_LWC2
= (0x32 << 26),
118 OPC_LDC1
= (0x35 << 26),
119 OPC_LDC2
= (0x36 << 26),
120 OPC_SWC1
= (0x39 << 26),
121 OPC_SWC2
= (0x3A << 26),
122 OPC_SDC1
= (0x3D << 26),
123 OPC_SDC2
= (0x3E << 26),
124 /* Compact Branches */
125 OPC_BLEZALC
= (0x06 << 26),
126 OPC_BGEZALC
= (0x06 << 26),
127 OPC_BGEUC
= (0x06 << 26),
128 OPC_BGTZALC
= (0x07 << 26),
129 OPC_BLTZALC
= (0x07 << 26),
130 OPC_BLTUC
= (0x07 << 26),
131 OPC_BOVC
= (0x08 << 26),
132 OPC_BEQZALC
= (0x08 << 26),
133 OPC_BEQC
= (0x08 << 26),
134 OPC_BLEZC
= (0x16 << 26),
135 OPC_BGEZC
= (0x16 << 26),
136 OPC_BGEC
= (0x16 << 26),
137 OPC_BGTZC
= (0x17 << 26),
138 OPC_BLTZC
= (0x17 << 26),
139 OPC_BLTC
= (0x17 << 26),
140 OPC_BNVC
= (0x18 << 26),
141 OPC_BNEZALC
= (0x18 << 26),
142 OPC_BNEC
= (0x18 << 26),
143 OPC_BC
= (0x32 << 26),
144 OPC_BEQZC
= (0x36 << 26),
145 OPC_JIC
= (0x36 << 26),
146 OPC_BALC
= (0x3A << 26),
147 OPC_BNEZC
= (0x3E << 26),
148 OPC_JIALC
= (0x3E << 26),
149 /* MDMX ASE specific */
150 OPC_MDMX
= (0x1E << 26),
151 /* Cache and prefetch */
152 OPC_CACHE
= (0x2F << 26),
153 OPC_PREF
= (0x33 << 26),
154 /* PC-relative address computation / loads */
155 OPC_PCREL
= (0x3B << 26),
158 /* PC-relative address computation / loads */
/*
 * PC-relative opcode selectors: major opcode plus insn bits 20..19
 * (TOP2BITS) or bits 20..16 (TOP5BITS).  'op' is parenthesized so that
 * compound arguments expand correctly ('&' binds tighter than '|').
 */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | ((op) & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | ((op) & (0x1f << 16)))
162 /* Instructions determined by bits 19 and 20 */
163 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
164 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
165 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
167 /* Instructions determined by bits 16 ... 20 */
168 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
169 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
172 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
175 /* MIPS special opcodes */
/* SPECIAL sub-opcode: major opcode plus the function field (insn bits 5..0).
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
180 OPC_SLL
= 0x00 | OPC_SPECIAL
,
181 /* NOP is SLL r0, r0, 0 */
182 /* SSNOP is SLL r0, r0, 1 */
183 /* EHB is SLL r0, r0, 3 */
184 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
185 OPC_ROTR
= OPC_SRL
| (1 << 21),
186 OPC_SRA
= 0x03 | OPC_SPECIAL
,
187 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
188 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
189 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
190 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
191 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
192 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
193 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
194 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
195 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
196 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
197 OPC_DROTR
= OPC_DSRL
| (1 << 21),
198 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
199 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
200 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
201 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
202 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
203 /* Multiplication / division */
204 OPC_MULT
= 0x18 | OPC_SPECIAL
,
205 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
206 OPC_DIV
= 0x1A | OPC_SPECIAL
,
207 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
208 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
209 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
210 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
211 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
213 /* 2 registers arithmetic / logic */
214 OPC_ADD
= 0x20 | OPC_SPECIAL
,
215 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
216 OPC_SUB
= 0x22 | OPC_SPECIAL
,
217 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
218 OPC_AND
= 0x24 | OPC_SPECIAL
,
219 OPC_OR
= 0x25 | OPC_SPECIAL
,
220 OPC_XOR
= 0x26 | OPC_SPECIAL
,
221 OPC_NOR
= 0x27 | OPC_SPECIAL
,
222 OPC_SLT
= 0x2A | OPC_SPECIAL
,
223 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
224 OPC_DADD
= 0x2C | OPC_SPECIAL
,
225 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
226 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
227 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
229 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
230 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
232 OPC_TGE
= 0x30 | OPC_SPECIAL
,
233 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
234 OPC_TLT
= 0x32 | OPC_SPECIAL
,
235 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
236 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
237 OPC_TNE
= 0x36 | OPC_SPECIAL
,
238 /* HI / LO registers load & stores */
239 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
240 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
241 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
242 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
243 /* Conditional moves */
244 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
245 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
247 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
248 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
250 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
253 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
254 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
255 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
256 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
257 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
259 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
260 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
261 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
262 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
266 * R6 Multiply and Divide instructions have the same opcode
267 * and function field as legacy OPC_MULT[U]/OPC_DIV[U]
/* R6 multiply/divide sub-opcode: SPECIAL decode bits plus insn bits 10..0.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | ((op) & 0x7ff))
272 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
273 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
274 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
275 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
276 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
277 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
278 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
279 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
281 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
282 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
283 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
284 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
285 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
286 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
287 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
288 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
290 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
291 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
292 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
293 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
294 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
297 /* REGIMM (rt field) opcodes */
/* REGIMM sub-opcode: major opcode plus the rt field (insn bits 20..16).
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | ((op) & (0x1F << 16)))
301 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
302 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
303 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
304 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
305 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
306 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
307 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
308 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
309 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
310 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
311 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
312 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
313 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
314 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
315 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
316 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
318 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
319 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
322 /* Special2 opcodes */
/* SPECIAL2 sub-opcode: major opcode plus the function field (bits 5..0).
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
326 /* Multiply & xxx operations */
327 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
328 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
329 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
330 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
331 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
333 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
334 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
335 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
336 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
337 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
338 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
339 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
340 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
341 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
342 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
343 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
344 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
346 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
347 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
348 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
349 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
351 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
354 /* Special3 opcodes */
/* SPECIAL3 sub-opcode: major opcode plus the function field (bits 5..0).
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
358 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
359 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
360 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
361 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
362 OPC_INS
= 0x04 | OPC_SPECIAL3
,
363 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
364 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
365 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
366 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
367 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
368 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
369 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
370 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
371 OPC_GINV
= 0x3D | OPC_SPECIAL3
,
374 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
375 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
376 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
377 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
378 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
379 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
380 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
381 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
382 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
383 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
384 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
385 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
388 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
389 /* MIPS DSP Arithmetic */
390 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
391 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
392 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
393 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
394 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
395 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
396 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
397 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
398 /* MIPS DSP GPR-Based Shift Sub-class */
399 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
400 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
401 /* MIPS DSP Multiply Sub-class insns */
402 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
403 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
404 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
405 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
406 /* DSP Bit/Manipulation Sub-class */
407 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
408 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
409 /* MIPS DSP Append Sub-class */
410 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
411 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
412 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
413 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
414 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
417 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
418 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
419 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
420 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
421 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
422 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
423 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
424 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
425 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
426 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
427 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
428 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
429 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
430 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
431 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
432 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
435 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
436 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
437 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
438 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
439 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
440 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
443 /* Loongson EXT load/store quad word opcodes */
/* Loongson EXT load/store quad word: major opcode plus insn bits 15 and 5.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_LOONGSON_GSLSQ(op) (MASK_OP_MAJOR(op) | ((op) & 0x8020))
446 OPC_GSLQ
= 0x0020 | OPC_LWC2
,
447 OPC_GSLQC1
= 0x8020 | OPC_LWC2
,
448 OPC_GSSHFL
= OPC_LWC2
,
449 OPC_GSSQ
= 0x0020 | OPC_SWC2
,
450 OPC_GSSQC1
= 0x8020 | OPC_SWC2
,
451 OPC_GSSHFS
= OPC_SWC2
,
454 /* Loongson EXT shifted load/store opcodes */
/* Loongson EXT shifted load/store: major opcode plus bits 15..14 and 5..0.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_LOONGSON_GSSHFLS(op) (MASK_OP_MAJOR(op) | ((op) & 0xc03f))
457 OPC_GSLWLC1
= 0x4 | OPC_GSSHFL
,
458 OPC_GSLWRC1
= 0x5 | OPC_GSSHFL
,
459 OPC_GSLDLC1
= 0x6 | OPC_GSSHFL
,
460 OPC_GSLDRC1
= 0x7 | OPC_GSSHFL
,
461 OPC_GSSWLC1
= 0x4 | OPC_GSSHFS
,
462 OPC_GSSWRC1
= 0x5 | OPC_GSSHFS
,
463 OPC_GSSDLC1
= 0x6 | OPC_GSSHFS
,
464 OPC_GSSDRC1
= 0x7 | OPC_GSSHFS
,
467 /* Loongson EXT LDC2/SDC2 opcodes */
/* Loongson EXT LDC2/SDC2: major opcode plus insn bits 2..0.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_LOONGSON_LSDC2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7))
471 OPC_GSLBX
= 0x0 | OPC_LDC2
,
472 OPC_GSLHX
= 0x1 | OPC_LDC2
,
473 OPC_GSLWX
= 0x2 | OPC_LDC2
,
474 OPC_GSLDX
= 0x3 | OPC_LDC2
,
475 OPC_GSLWXC1
= 0x6 | OPC_LDC2
,
476 OPC_GSLDXC1
= 0x7 | OPC_LDC2
,
477 OPC_GSSBX
= 0x0 | OPC_SDC2
,
478 OPC_GSSHX
= 0x1 | OPC_SDC2
,
479 OPC_GSSWX
= 0x2 | OPC_SDC2
,
480 OPC_GSSDX
= 0x3 | OPC_SDC2
,
481 OPC_GSSWXC1
= 0x6 | OPC_SDC2
,
482 OPC_GSSDXC1
= 0x7 | OPC_SDC2
,
/* BSHFL sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_BSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
489 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
490 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
491 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
492 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
493 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
494 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
495 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
496 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
/* DBSHFL sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
503 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
504 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
505 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
506 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
507 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
508 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
509 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
510 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
511 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
512 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
513 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
516 /* MIPS DSP REGIMM opcodes */
518 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
519 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
/* DSP load-indexed (LX) sub-opcode: SPECIAL3 decode bits plus bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_LX(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
525 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
526 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
527 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
528 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
/* DSP ADDU.QB sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
533 /* MIPS DSP Arithmetic Sub-class */
534 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
535 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
536 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
537 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
538 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
539 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
540 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
541 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
542 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
543 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
544 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
545 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
546 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
547 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
548 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
549 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
550 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
551 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
552 /* MIPS DSP Multiply Sub-class insns */
553 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
554 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
555 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
556 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
557 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
558 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
/* OPC_ADDUH_QB_DSP uses the same encoding as OPC_MULT_G_2E. */
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
/* DSP ADDUH.QB sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
564 /* MIPS DSP Arithmetic Sub-class */
565 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
566 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
567 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
568 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
569 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
570 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
571 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
572 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
573 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
574 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
575 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
576 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
577 /* MIPS DSP Multiply Sub-class insns */
578 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
579 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
580 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
581 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
/* DSP ABSQ_S.PH sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
586 /* MIPS DSP Arithmetic Sub-class */
587 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
588 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
589 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
590 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
591 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
592 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
593 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
594 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
595 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
596 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
597 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
598 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
599 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
600 /* DSP Bit/Manipulation Sub-class */
601 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
602 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
603 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
604 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
605 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
/* DSP CMPU_EQ.QB sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
610 /* MIPS DSP Arithmetic Sub-class */
611 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
612 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
613 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
614 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
615 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
616 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
617 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
618 /* DSP Compare-Pick Sub-class */
619 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
620 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
621 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
622 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
623 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
624 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
625 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
626 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
627 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
628 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
629 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
630 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
631 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
632 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
633 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
/* DSP SHLL.QB sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
638 /* MIPS DSP GPR-Based Shift Sub-class */
639 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
640 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
641 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
642 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
643 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
644 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
645 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
646 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
647 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
648 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
649 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
650 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
651 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
652 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
653 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
654 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
655 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
656 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
657 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
658 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
659 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
660 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
/* DSP DPA.W.PH sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
665 /* MIPS DSP Multiply Sub-class insns */
666 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
667 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
668 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
669 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
670 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
671 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
672 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
673 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
674 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
675 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
676 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
677 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
678 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
679 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
680 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
681 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
682 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
683 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
684 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
685 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
686 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
687 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
/* DSP INSV sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_INSV(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
692 /* DSP Bit/Manipulation Sub-class */
693 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
/* DSP APPEND sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
698 /* MIPS DSP Append Sub-class */
699 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
700 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
701 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
/* DSP EXTR.W sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
706 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
707 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
708 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
709 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
710 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
711 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
712 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
713 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
714 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
715 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
716 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
717 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
718 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
719 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
720 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
721 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
722 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
723 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
/* DSP ABSQ_S.QH sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
728 /* MIPS DSP Arithmetic Sub-class */
729 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
730 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
731 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
732 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
733 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
734 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
735 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
736 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
737 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
738 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
739 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
740 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
741 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
742 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
743 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
744 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
745 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
746 /* DSP Bit/Manipulation Sub-class */
747 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
748 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
749 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
750 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
751 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
752 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
/* DSP ADDU.OB sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
757 /* MIPS DSP Multiply Sub-class insns */
758 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
759 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
760 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
761 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
762 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
763 /* MIPS DSP Arithmetic Sub-class */
764 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
765 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
766 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
767 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
768 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
769 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
770 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
771 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
772 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
773 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
774 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
775 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
776 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
777 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
778 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
779 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
780 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
781 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
782 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
783 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
784 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
/* DSP CMPU_EQ.OB sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
789 /* DSP Compare-Pick Sub-class */
790 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
791 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
792 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
793 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
794 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
795 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
796 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
797 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
798 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
799 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
800 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
801 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
802 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
803 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
804 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
805 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
806 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
807 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
808 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
809 /* MIPS DSP Arithmetic Sub-class */
810 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
811 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
812 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
813 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
814 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
815 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
816 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
817 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
/* DSP DAPPEND sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
822 /* DSP Append Sub-class */
823 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
824 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
825 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
826 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
/* DSP DEXTR.W sub-opcode: SPECIAL3 decode bits plus insn bits 10..6.
   'op' is parenthesized so compound arguments expand correctly. */
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | ((op) & (0x1F << 6)))
831 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
832 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
833 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
834 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
835 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
836 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
837 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
838 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
839 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
840 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
841 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
842 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
843 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
844 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
845 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
846 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
847 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
848 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
849 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
850 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
851 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
852 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
855 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
857 /* DSP Bit/Manipulation Sub-class */
858 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
861 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
863 /* MIPS DSP Multiply Sub-class insns */
864 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
865 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
866 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
867 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
868 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
869 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
870 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
871 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
872 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
873 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
874 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
875 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
876 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
877 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
878 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
879 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
880 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
881 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
882 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
883 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
884 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
885 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
886 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
887 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
888 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
889 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
892 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
894 /* MIPS DSP GPR-Based Shift Sub-class */
895 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
896 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
897 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
898 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
899 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
900 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
901 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
902 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
903 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
904 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
905 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
906 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
907 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
908 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
909 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
910 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
911 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
912 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
913 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
914 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
915 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
916 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
917 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
918 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
919 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
920 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
923 /* Coprocessor 0 (rs field) */
924 #define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
927 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
928 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
929 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
930 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
931 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
932 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
933 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
934 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
935 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
936 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
937 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
938 OPC_C0
= (0x10 << 21) | OPC_CP0
,
939 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
940 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
941 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
942 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
943 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
944 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
945 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
946 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
947 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
948 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
949 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
950 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
951 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
952 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
953 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
957 #define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
960 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
961 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
962 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
963 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
964 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
965 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
966 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
967 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
970 /* Coprocessor 0 (with rs == C0) */
971 #define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
974 OPC_TLBR
= 0x01 | OPC_C0
,
975 OPC_TLBWI
= 0x02 | OPC_C0
,
976 OPC_TLBINV
= 0x03 | OPC_C0
,
977 OPC_TLBINVF
= 0x04 | OPC_C0
,
978 OPC_TLBWR
= 0x06 | OPC_C0
,
979 OPC_TLBP
= 0x08 | OPC_C0
,
980 OPC_RFE
= 0x10 | OPC_C0
,
981 OPC_ERET
= 0x18 | OPC_C0
,
982 OPC_DERET
= 0x1F | OPC_C0
,
983 OPC_WAIT
= 0x20 | OPC_C0
,
986 #define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
989 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
990 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
991 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
992 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
993 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
994 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
995 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
996 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
997 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
998 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
999 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1002 #define MASK_LMMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1005 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1006 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1007 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1008 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1009 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1010 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1011 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1012 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1014 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1015 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1016 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1017 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1018 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1019 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1020 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1021 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1023 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1024 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1025 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1026 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1027 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1028 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1029 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1030 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1032 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1033 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1034 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1035 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1036 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1037 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1038 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1039 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1041 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1042 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1043 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1044 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1045 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1046 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1048 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1049 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1050 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1051 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1052 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1053 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1055 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1056 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1057 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1058 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1059 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1060 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1062 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1063 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1064 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1065 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1066 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1067 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1069 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1070 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1071 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1072 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1073 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1074 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1076 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1077 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1078 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1079 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1080 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1081 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1083 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1084 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1085 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1086 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1087 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1088 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1090 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1091 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1092 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1093 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1094 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1095 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1099 #define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1102 OPC_LWXC1
= 0x00 | OPC_CP3
,
1103 OPC_LDXC1
= 0x01 | OPC_CP3
,
1104 OPC_LUXC1
= 0x05 | OPC_CP3
,
1105 OPC_SWXC1
= 0x08 | OPC_CP3
,
1106 OPC_SDXC1
= 0x09 | OPC_CP3
,
1107 OPC_SUXC1
= 0x0D | OPC_CP3
,
1108 OPC_PREFX
= 0x0F | OPC_CP3
,
1109 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1110 OPC_MADD_S
= 0x20 | OPC_CP3
,
1111 OPC_MADD_D
= 0x21 | OPC_CP3
,
1112 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1113 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1114 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1115 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1116 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1117 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1118 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1119 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1120 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1121 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
/*
 *     MMI (MultiMedia Instruction) encodings
 *     ======================================
 *
 * MMI instructions encoding table keys:
 *
 *     *   This code is reserved for future use. An attempt to execute it
 *         causes a Reserved Instruction exception.
 *     %   This code indicates an instruction class. The instruction word
 *         must be further decoded by examining additional tables that show
 *         the values for other instruction fields.
 *     #   This code is reserved for the unsupported instructions DMULT,
 *         DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
 *         to execute it causes a Reserved Instruction exception.
 *
 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
 *
 *  31    26                                        0
 * +--------+----------------------------------------+
 * | opcode |                                        |
 * +--------+----------------------------------------+
 *
 *   opcode  bits 28..26
 *     bits |   0   |   1   |   2   |   3   |   4   |   5   |   6   |   7
 *   31..29 |  000  |  001  |  010  |  011  |  100  |  101  |  110  |  111
 *   -------+-------+-------+-------+-------+-------+-------+-------+-------
 *    0 000 |SPECIAL| REGIMM|   J   |  JAL  |  BEQ  |  BNE  |  BLEZ |  BGTZ
 *    1 001 |  ADDI | ADDIU |  SLTI | SLTIU |  ANDI |  ORI  |  XORI |  LUI
 *    2 010 |  COP0 |  COP1 |   *   |   *   |  BEQL |  BNEL | BLEZL | BGTZL
 *    3 011 | DADDI | DADDIU|  LDL  |  LDR  |  MMI% |   *   |  LQ   |  SQ
 *    4 100 |  LB   |  LH   |  LWL  |  LW   |  LBU  |  LHU  |  LWR  |  LWU
 *    5 101 |  SB   |  SH   |  SWL  |  SW   |  SDL  |  SDR  |  SWR  | CACHE
 *    6 110 |   #   |  LWC1 |   #   |  PREF |   #   |  LDC1 |   #   |  LD
 *    7 111 |   #   |  SWC1 |   #   |   *   |   #   |  SDC1 |   #   |  SD
 */

enum {
    MMI_OPC_CLASS_MMI = 0x1C << 26,    /* Same as OPC_SPECIAL2 */
    MMI_OPC_SQ        = 0x1F << 26,    /* Same as OPC_SPECIAL3 */
};
1166 * MMI instructions with opcode field = MMI:
1169 * +--------+-------------------------------+--------+
1170 * | MMI | |function|
1171 * +--------+-------------------------------+--------+
1173 * function bits 2..0
1174 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
1175 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
1176 * -------+-------+-------+-------+-------+-------+-------+-------+-------
1177 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
1178 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
1179 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
1180 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
1181 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
1182 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
1183 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
1184 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
1187 #define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
1189 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
1190 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
1191 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
1192 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
1193 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
1194 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
1195 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
1196 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
1199 /* global register indices */
1200 TCGv cpu_gpr
[32], cpu_PC
;
1202 * For CPUs using 128-bit GPR registers, we put the lower halves in cpu_gpr[])
1203 * and the upper halves in cpu_gpr_hi[].
1205 TCGv_i64 cpu_gpr_hi
[32];
1206 TCGv cpu_HI
[MIPS_DSP_ACC
], cpu_LO
[MIPS_DSP_ACC
];
1207 static TCGv cpu_dspctrl
, btarget
;
1209 static TCGv cpu_lladdr
, cpu_llval
;
1210 static TCGv_i32 hflags
;
1211 TCGv_i32 fpu_fcr0
, fpu_fcr31
;
1212 TCGv_i64 fpu_f64
[32];
1214 #include "exec/gen-icount.h"
/* Printable names for the DSP accumulator HI/LO halves. */
static const char regnames_HI[][4] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char regnames_LO[][4] = {
    "LO0", "LO1", "LO2", "LO3",
};
1224 /* General purpose registers moves. */
1225 void gen_load_gpr(TCGv t
, int reg
)
1228 tcg_gen_movi_tl(t
, 0);
1230 tcg_gen_mov_tl(t
, cpu_gpr
[reg
]);
1234 void gen_store_gpr(TCGv t
, int reg
)
1237 tcg_gen_mov_tl(cpu_gpr
[reg
], t
);
#if defined(TARGET_MIPS64)
/* Move from the upper 64 bits of a 128-bit GPR; $0 reads as zero. */
void gen_load_gpr_hi(TCGv_i64 t, int reg)
{
    if (reg == 0) {
        tcg_gen_movi_i64(t, 0);
    } else {
        tcg_gen_mov_i64(t, cpu_gpr_hi[reg]);
    }
}

/* Move to the upper 64 bits of a 128-bit GPR; writes to $0 are discarded. */
void gen_store_gpr_hi(TCGv_i64 t, int reg)
{
    if (reg != 0) {
        tcg_gen_mov_i64(cpu_gpr_hi[reg], t);
    }
}
#endif /* TARGET_MIPS64 */
1259 /* Moves to/from shadow registers. */
1260 static inline void gen_load_srsgpr(int from
, int to
)
1262 TCGv t0
= tcg_temp_new();
1265 tcg_gen_movi_tl(t0
, 0);
1267 TCGv_i32 t2
= tcg_temp_new_i32();
1268 TCGv_ptr addr
= tcg_temp_new_ptr();
1270 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1271 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1272 tcg_gen_andi_i32(t2
, t2
, 0xf);
1273 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1274 tcg_gen_ext_i32_ptr(addr
, t2
);
1275 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1277 tcg_gen_ld_tl(t0
, addr
, sizeof(target_ulong
) * from
);
1278 tcg_temp_free_ptr(addr
);
1279 tcg_temp_free_i32(t2
);
1281 gen_store_gpr(t0
, to
);
1285 static inline void gen_store_srsgpr(int from
, int to
)
1288 TCGv t0
= tcg_temp_new();
1289 TCGv_i32 t2
= tcg_temp_new_i32();
1290 TCGv_ptr addr
= tcg_temp_new_ptr();
1292 gen_load_gpr(t0
, from
);
1293 tcg_gen_ld_i32(t2
, cpu_env
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
1294 tcg_gen_shri_i32(t2
, t2
, CP0SRSCtl_PSS
);
1295 tcg_gen_andi_i32(t2
, t2
, 0xf);
1296 tcg_gen_muli_i32(t2
, t2
, sizeof(target_ulong
) * 32);
1297 tcg_gen_ext_i32_ptr(addr
, t2
);
1298 tcg_gen_add_ptr(addr
, cpu_env
, addr
);
1300 tcg_gen_st_tl(t0
, addr
, sizeof(target_ulong
) * to
);
1301 tcg_temp_free_ptr(addr
);
1302 tcg_temp_free_i32(t2
);
1308 static inline void gen_save_pc(target_ulong pc
)
1310 tcg_gen_movi_tl(cpu_PC
, pc
);
1313 static inline void save_cpu_state(DisasContext
*ctx
, int do_save_pc
)
1315 LOG_DISAS("hflags %08x saved %08x\n", ctx
->hflags
, ctx
->saved_hflags
);
1316 if (do_save_pc
&& ctx
->base
.pc_next
!= ctx
->saved_pc
) {
1317 gen_save_pc(ctx
->base
.pc_next
);
1318 ctx
->saved_pc
= ctx
->base
.pc_next
;
1320 if (ctx
->hflags
!= ctx
->saved_hflags
) {
1321 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
1322 ctx
->saved_hflags
= ctx
->hflags
;
1323 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1329 tcg_gen_movi_tl(btarget
, ctx
->btarget
);
1335 static inline void restore_cpu_state(CPUMIPSState
*env
, DisasContext
*ctx
)
1337 ctx
->saved_hflags
= ctx
->hflags
;
1338 switch (ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
1344 ctx
->btarget
= env
->btarget
;
1349 void generate_exception_err(DisasContext
*ctx
, int excp
, int err
)
1351 save_cpu_state(ctx
, 1);
1352 gen_helper_raise_exception_err(cpu_env
, tcg_constant_i32(excp
),
1353 tcg_constant_i32(err
));
1354 ctx
->base
.is_jmp
= DISAS_NORETURN
;
1357 void generate_exception(DisasContext
*ctx
, int excp
)
1359 gen_helper_raise_exception(cpu_env
, tcg_constant_i32(excp
));
1362 void generate_exception_end(DisasContext
*ctx
, int excp
)
1364 generate_exception_err(ctx
, excp
, 0);
1367 void generate_exception_break(DisasContext
*ctx
, int code
)
1369 #ifdef CONFIG_USER_ONLY
1370 /* Pass the break code along to cpu_loop. */
1371 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
1372 offsetof(CPUMIPSState
, error_code
));
1374 generate_exception_end(ctx
, EXCP_BREAK
);
1377 void gen_reserved_instruction(DisasContext
*ctx
)
1379 generate_exception_end(ctx
, EXCP_RI
);
1382 /* Floating point register moves. */
1383 void gen_load_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1385 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1386 generate_exception(ctx
, EXCP_RI
);
1388 tcg_gen_extrl_i64_i32(t
, fpu_f64
[reg
]);
1391 void gen_store_fpr32(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1394 if (ctx
->hflags
& MIPS_HFLAG_FRE
) {
1395 generate_exception(ctx
, EXCP_RI
);
1397 t64
= tcg_temp_new_i64();
1398 tcg_gen_extu_i32_i64(t64
, t
);
1399 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 0, 32);
1400 tcg_temp_free_i64(t64
);
1403 static void gen_load_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1405 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1406 tcg_gen_extrh_i64_i32(t
, fpu_f64
[reg
]);
1408 gen_load_fpr32(ctx
, t
, reg
| 1);
1412 static void gen_store_fpr32h(DisasContext
*ctx
, TCGv_i32 t
, int reg
)
1414 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1415 TCGv_i64 t64
= tcg_temp_new_i64();
1416 tcg_gen_extu_i32_i64(t64
, t
);
1417 tcg_gen_deposit_i64(fpu_f64
[reg
], fpu_f64
[reg
], t64
, 32, 32);
1418 tcg_temp_free_i64(t64
);
1420 gen_store_fpr32(ctx
, t
, reg
| 1);
1424 void gen_load_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1426 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1427 tcg_gen_mov_i64(t
, fpu_f64
[reg
]);
1429 tcg_gen_concat32_i64(t
, fpu_f64
[reg
& ~1], fpu_f64
[reg
| 1]);
1433 void gen_store_fpr64(DisasContext
*ctx
, TCGv_i64 t
, int reg
)
1435 if (ctx
->hflags
& MIPS_HFLAG_F64
) {
1436 tcg_gen_mov_i64(fpu_f64
[reg
], t
);
1439 tcg_gen_deposit_i64(fpu_f64
[reg
& ~1], fpu_f64
[reg
& ~1], t
, 0, 32);
1440 t0
= tcg_temp_new_i64();
1441 tcg_gen_shri_i64(t0
, t
, 32);
1442 tcg_gen_deposit_i64(fpu_f64
[reg
| 1], fpu_f64
[reg
| 1], t0
, 0, 32);
1443 tcg_temp_free_i64(t0
);
/*
 * Map an FP condition code number to its bit position in FCSR:
 * cc 0 is bit 23, cc 1..7 are bits 25..31.
 * NOTE(review): the body was dropped during extraction; reconstructed —
 * confirm against the upstream file.
 */
int get_fp_bit(int cc)
{
    if (cc) {
        return 24 + cc;
    } else {
        return 23;
    }
}
1456 /* Addresses computation */
1457 void gen_op_addr_add(DisasContext
*ctx
, TCGv ret
, TCGv arg0
, TCGv arg1
)
1459 tcg_gen_add_tl(ret
, arg0
, arg1
);
1461 #if defined(TARGET_MIPS64)
1462 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1463 tcg_gen_ext32s_i64(ret
, ret
);
1468 static inline void gen_op_addr_addi(DisasContext
*ctx
, TCGv ret
, TCGv base
,
1471 tcg_gen_addi_tl(ret
, base
, ofs
);
1473 #if defined(TARGET_MIPS64)
1474 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1475 tcg_gen_ext32s_i64(ret
, ret
);
1480 /* Addresses computation (translation time) */
1481 static target_long
addr_add(DisasContext
*ctx
, target_long base
,
1484 target_long sum
= base
+ offset
;
1486 #if defined(TARGET_MIPS64)
1487 if (ctx
->hflags
& MIPS_HFLAG_AWRAP
) {
1494 /* Sign-extract the low 32-bits to a target_long. */
1495 void gen_move_low32(TCGv ret
, TCGv_i64 arg
)
1497 #if defined(TARGET_MIPS64)
1498 tcg_gen_ext32s_i64(ret
, arg
);
1500 tcg_gen_extrl_i64_i32(ret
, arg
);
1504 /* Sign-extract the high 32-bits to a target_long. */
1505 void gen_move_high32(TCGv ret
, TCGv_i64 arg
)
1507 #if defined(TARGET_MIPS64)
1508 tcg_gen_sari_i64(ret
, arg
, 32);
1510 tcg_gen_extrh_i64_i32(ret
, arg
);
1514 bool check_cp0_enabled(DisasContext
*ctx
)
1516 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
1517 generate_exception_end(ctx
, EXCP_CpU
);
1523 void check_cp1_enabled(DisasContext
*ctx
)
1525 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_FPU
))) {
1526 generate_exception_err(ctx
, EXCP_CpU
, 1);
1531 * Verify that the processor is running with COP1X instructions enabled.
1532 * This is associated with the nabla symbol in the MIPS32 and MIPS64
1535 void check_cop1x(DisasContext
*ctx
)
1537 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_COP1X
))) {
1538 gen_reserved_instruction(ctx
);
1543 * Verify that the processor is running with 64-bit floating-point
1544 * operations enabled.
1546 void check_cp1_64bitmode(DisasContext
*ctx
)
1548 if (unlikely(~ctx
->hflags
& (MIPS_HFLAG_F64
| MIPS_HFLAG_COP1X
))) {
1549 gen_reserved_instruction(ctx
);
1554 * Verify if floating point register is valid; an operation is not defined
1555 * if bit 0 of any register specification is set and the FR bit in the
1556 * Status register equals zero, since the register numbers specify an
1557 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1558 * in the Status register equals one, both even and odd register numbers
1559 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1561 * Multiple 64 bit wide registers can be checked by calling
1562 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
1564 void check_cp1_registers(DisasContext
*ctx
, int regs
)
1566 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_F64
) && (regs
& 1))) {
1567 gen_reserved_instruction(ctx
);
1572 * Verify that the processor is running with DSP instructions enabled.
1573 * This is enabled by CP0 Status register MX(24) bit.
1575 static inline void check_dsp(DisasContext
*ctx
)
1577 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP
))) {
1578 if (ctx
->insn_flags
& ASE_DSP
) {
1579 generate_exception_end(ctx
, EXCP_DSPDIS
);
1581 gen_reserved_instruction(ctx
);
1586 static inline void check_dsp_r2(DisasContext
*ctx
)
1588 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R2
))) {
1589 if (ctx
->insn_flags
& ASE_DSP
) {
1590 generate_exception_end(ctx
, EXCP_DSPDIS
);
1592 gen_reserved_instruction(ctx
);
1597 static inline void check_dsp_r3(DisasContext
*ctx
)
1599 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_DSP_R3
))) {
1600 if (ctx
->insn_flags
& ASE_DSP
) {
1601 generate_exception_end(ctx
, EXCP_DSPDIS
);
1603 gen_reserved_instruction(ctx
);
1609 * This code generates a "reserved instruction" exception if the
1610 * CPU does not support the instruction set corresponding to flags.
1612 void check_insn(DisasContext
*ctx
, uint64_t flags
)
1614 if (unlikely(!(ctx
->insn_flags
& flags
))) {
1615 gen_reserved_instruction(ctx
);
1620 * This code generates a "reserved instruction" exception if the
1621 * CPU has corresponding flag set which indicates that the instruction
1624 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
1626 if (unlikely(ctx
->insn_flags
& flags
)) {
1627 gen_reserved_instruction(ctx
);
1632 * The Linux kernel traps certain reserved instruction exceptions to
1633 * emulate the corresponding instructions. QEMU is the kernel in user
1634 * mode, so those traps are emulated by accepting the instructions.
1636 * A reserved instruction exception is generated for flagged CPUs if
1637 * QEMU runs in system mode.
1639 static inline void check_insn_opc_user_only(DisasContext
*ctx
, uint64_t flags
)
1641 #ifndef CONFIG_USER_ONLY
1642 check_insn_opc_removed(ctx
, flags
);
1647 * This code generates a "reserved instruction" exception if the
1648 * CPU does not support 64-bit paired-single (PS) floating point data type.
1650 static inline void check_ps(DisasContext
*ctx
)
1652 if (unlikely(!ctx
->ps
)) {
1653 generate_exception(ctx
, EXCP_RI
);
1655 check_cp1_64bitmode(ctx
);
1659 * This code generates a "reserved instruction" exception if cpu is not
1660 * 64-bit or 64-bit instructions are not enabled.
1662 void check_mips_64(DisasContext
*ctx
)
1664 if (unlikely((TARGET_LONG_BITS
!= 64) || !(ctx
->hflags
& MIPS_HFLAG_64
))) {
1665 gen_reserved_instruction(ctx
);
1669 #ifndef CONFIG_USER_ONLY
1670 static inline void check_mvh(DisasContext
*ctx
)
1672 if (unlikely(!ctx
->mvh
)) {
1673 generate_exception(ctx
, EXCP_RI
);
1679 * This code generates a "reserved instruction" exception if the
1680 * Config5 XNP bit is set.
1682 static inline void check_xnp(DisasContext
*ctx
)
1684 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_XNP
))) {
1685 gen_reserved_instruction(ctx
);
1689 #ifndef CONFIG_USER_ONLY
1691 * This code generates a "reserved instruction" exception if the
1692 * Config3 PW bit is NOT set.
1694 static inline void check_pw(DisasContext
*ctx
)
1696 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_PW
)))) {
1697 gen_reserved_instruction(ctx
);
1703 * This code generates a "reserved instruction" exception if the
1704 * Config3 MT bit is NOT set.
1706 static inline void check_mt(DisasContext
*ctx
)
1708 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
1709 gen_reserved_instruction(ctx
);
1713 #ifndef CONFIG_USER_ONLY
1715 * This code generates a "coprocessor unusable" exception if CP0 is not
1716 * available, and, if that is not the case, generates a "reserved instruction"
1717 * exception if the Config5 MT bit is NOT set. This is needed for availability
1718 * control of some of MT ASE instructions.
1720 static inline void check_cp0_mt(DisasContext
*ctx
)
1722 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_CP0
))) {
1723 generate_exception_end(ctx
, EXCP_CpU
);
1725 if (unlikely(!(ctx
->CP0_Config3
& (1 << CP0C3_MT
)))) {
1726 gen_reserved_instruction(ctx
);
1733 * This code generates a "reserved instruction" exception if the
1734 * Config5 NMS bit is set.
1736 static inline void check_nms(DisasContext
*ctx
)
1738 if (unlikely(ctx
->CP0_Config5
& (1 << CP0C5_NMS
))) {
1739 gen_reserved_instruction(ctx
);
1744 * This code generates a "reserved instruction" exception if the
1745 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
1746 * Config2 TL, and Config5 L2C are unset.
1748 static inline void check_nms_dl_il_sl_tl_l2c(DisasContext
*ctx
)
1750 if (unlikely((ctx
->CP0_Config5
& (1 << CP0C5_NMS
)) &&
1751 !(ctx
->CP0_Config1
& (1 << CP0C1_DL
)) &&
1752 !(ctx
->CP0_Config1
& (1 << CP0C1_IL
)) &&
1753 !(ctx
->CP0_Config2
& (1 << CP0C2_SL
)) &&
1754 !(ctx
->CP0_Config2
& (1 << CP0C2_TL
)) &&
1755 !(ctx
->CP0_Config5
& (1 << CP0C5_L2C
)))) {
1756 gen_reserved_instruction(ctx
);
1761 * This code generates a "reserved instruction" exception if the
1762 * Config5 EVA bit is NOT set.
1764 static inline void check_eva(DisasContext
*ctx
)
1766 if (unlikely(!(ctx
->CP0_Config5
& (1 << CP0C5_EVA
)))) {
1767 gen_reserved_instruction(ctx
);
1773 * Define small wrappers for gen_load_fpr* so that we have a uniform
1774 * calling interface for 32 and 64-bit FPRs. No sense in changing
1775 * all callers for gen_load_fpr32 when we need the CTX parameter for
1778 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1779 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
1780 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1781 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
1782 int ft, int fs, int cc) \
1784 TCGv_i##bits fp0 = tcg_temp_new_i##bits(); \
1785 TCGv_i##bits fp1 = tcg_temp_new_i##bits(); \
1794 check_cp1_registers(ctx, fs | ft); \
1802 gen_ldcmp_fpr##bits(ctx, fp0, fs); \
1803 gen_ldcmp_fpr##bits(ctx, fp1, ft); \
1806 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); \
1809 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); \
1812 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); \
1815 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); \
1818 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); \
1821 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); \
1824 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); \
1827 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); \
1830 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); \
1833 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); \
1836 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); \
1839 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); \
1842 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); \
1845 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); \
1848 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); \
1851 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); \
1856 tcg_temp_free_i##bits(fp0); \
1857 tcg_temp_free_i##bits(fp1); \
1860 FOP_CONDS(, 0, d
, FMT_D
, 64)
1861 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
1862 FOP_CONDS(, 0, s
, FMT_S
, 32)
1863 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
1864 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
1865 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
1868 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
1869 static inline void gen_r6_cmp_ ## fmt(DisasContext *ctx, int n, \
1870 int ft, int fs, int fd) \
1872 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
1873 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
1874 if (ifmt == FMT_D) { \
1875 check_cp1_registers(ctx, fs | ft | fd); \
1877 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
1878 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
1881 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
1884 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
1887 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
1890 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
1893 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
1896 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
1899 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
1902 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
1905 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
1908 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
1911 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
1914 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
1917 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
1920 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
1923 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
1926 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
1929 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
1932 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
1935 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
1938 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
1941 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
1944 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
1950 tcg_temp_free_i ## bits(fp0); \
1951 tcg_temp_free_i ## bits(fp1); \
1954 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
1955 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
1957 #undef gen_ldcmp_fpr32
1958 #undef gen_ldcmp_fpr64
1960 /* load/store instructions. */
1961 #ifdef CONFIG_USER_ONLY
1962 #define OP_LD_ATOMIC(insn, fname) \
1963 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
1964 DisasContext *ctx) \
1966 TCGv t0 = tcg_temp_new(); \
1967 tcg_gen_mov_tl(t0, arg1); \
1968 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
1969 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
1970 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
1971 tcg_temp_free(t0); \
1974 #define OP_LD_ATOMIC(insn, fname) \
1975 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
1976 DisasContext *ctx) \
1978 gen_helper_##insn(ret, cpu_env, arg1, tcg_constant_i32(mem_idx)); \
1981 OP_LD_ATOMIC(ll
, ld32s
);
1982 #if defined(TARGET_MIPS64)
1983 OP_LD_ATOMIC(lld
, ld64
);
1987 void gen_base_offset_addr(DisasContext
*ctx
, TCGv addr
, int base
, int offset
)
1990 tcg_gen_movi_tl(addr
, offset
);
1991 } else if (offset
== 0) {
1992 gen_load_gpr(addr
, base
);
1994 tcg_gen_movi_tl(addr
, offset
);
1995 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
1999 static target_ulong
pc_relative_pc(DisasContext
*ctx
)
2001 target_ulong pc
= ctx
->base
.pc_next
;
2003 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2004 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2009 pc
&= ~(target_ulong
)3;
2014 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2015 int rt
, int base
, int offset
)
2018 int mem_idx
= ctx
->mem_idx
;
2020 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
|
2023 * Loongson CPU uses a load to zero register for prefetch.
2024 * We emulate it as a NOP. On other CPU we must perform the
2025 * actual memory access.
2030 t0
= tcg_temp_new();
2031 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2034 #if defined(TARGET_MIPS64)
2036 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2037 ctx
->default_tcg_memop_mask
);
2038 gen_store_gpr(t0
, rt
);
2041 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
|
2042 ctx
->default_tcg_memop_mask
);
2043 gen_store_gpr(t0
, rt
);
2047 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2048 gen_store_gpr(t0
, rt
);
2051 t1
= tcg_temp_new();
2053 * Do a byte access to possibly trigger a page
2054 * fault with the unaligned address.
2056 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2057 tcg_gen_andi_tl(t1
, t0
, 7);
2058 if (!cpu_is_bigendian(ctx
)) {
2059 tcg_gen_xori_tl(t1
, t1
, 7);
2061 tcg_gen_shli_tl(t1
, t1
, 3);
2062 tcg_gen_andi_tl(t0
, t0
, ~7);
2063 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2064 tcg_gen_shl_tl(t0
, t0
, t1
);
2065 t2
= tcg_const_tl(-1);
2066 tcg_gen_shl_tl(t2
, t2
, t1
);
2067 gen_load_gpr(t1
, rt
);
2068 tcg_gen_andc_tl(t1
, t1
, t2
);
2070 tcg_gen_or_tl(t0
, t0
, t1
);
2072 gen_store_gpr(t0
, rt
);
2075 t1
= tcg_temp_new();
2077 * Do a byte access to possibly trigger a page
2078 * fault with the unaligned address.
2080 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2081 tcg_gen_andi_tl(t1
, t0
, 7);
2082 if (cpu_is_bigendian(ctx
)) {
2083 tcg_gen_xori_tl(t1
, t1
, 7);
2085 tcg_gen_shli_tl(t1
, t1
, 3);
2086 tcg_gen_andi_tl(t0
, t0
, ~7);
2087 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2088 tcg_gen_shr_tl(t0
, t0
, t1
);
2089 tcg_gen_xori_tl(t1
, t1
, 63);
2090 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2091 tcg_gen_shl_tl(t2
, t2
, t1
);
2092 gen_load_gpr(t1
, rt
);
2093 tcg_gen_and_tl(t1
, t1
, t2
);
2095 tcg_gen_or_tl(t0
, t0
, t1
);
2097 gen_store_gpr(t0
, rt
);
2100 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2101 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2103 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUQ
);
2104 gen_store_gpr(t0
, rt
);
2108 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2109 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2111 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2112 gen_store_gpr(t0
, rt
);
2115 mem_idx
= MIPS_HFLAG_UM
;
2118 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2119 ctx
->default_tcg_memop_mask
);
2120 gen_store_gpr(t0
, rt
);
2123 mem_idx
= MIPS_HFLAG_UM
;
2126 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2127 ctx
->default_tcg_memop_mask
);
2128 gen_store_gpr(t0
, rt
);
2131 mem_idx
= MIPS_HFLAG_UM
;
2134 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2135 ctx
->default_tcg_memop_mask
);
2136 gen_store_gpr(t0
, rt
);
2139 mem_idx
= MIPS_HFLAG_UM
;
2142 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2143 gen_store_gpr(t0
, rt
);
2146 mem_idx
= MIPS_HFLAG_UM
;
2149 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2150 gen_store_gpr(t0
, rt
);
2153 mem_idx
= MIPS_HFLAG_UM
;
2156 t1
= tcg_temp_new();
2158 * Do a byte access to possibly trigger a page
2159 * fault with the unaligned address.
2161 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2162 tcg_gen_andi_tl(t1
, t0
, 3);
2163 if (!cpu_is_bigendian(ctx
)) {
2164 tcg_gen_xori_tl(t1
, t1
, 3);
2166 tcg_gen_shli_tl(t1
, t1
, 3);
2167 tcg_gen_andi_tl(t0
, t0
, ~3);
2168 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2169 tcg_gen_shl_tl(t0
, t0
, t1
);
2170 t2
= tcg_const_tl(-1);
2171 tcg_gen_shl_tl(t2
, t2
, t1
);
2172 gen_load_gpr(t1
, rt
);
2173 tcg_gen_andc_tl(t1
, t1
, t2
);
2175 tcg_gen_or_tl(t0
, t0
, t1
);
2177 tcg_gen_ext32s_tl(t0
, t0
);
2178 gen_store_gpr(t0
, rt
);
2181 mem_idx
= MIPS_HFLAG_UM
;
2184 t1
= tcg_temp_new();
2186 * Do a byte access to possibly trigger a page
2187 * fault with the unaligned address.
2189 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2190 tcg_gen_andi_tl(t1
, t0
, 3);
2191 if (cpu_is_bigendian(ctx
)) {
2192 tcg_gen_xori_tl(t1
, t1
, 3);
2194 tcg_gen_shli_tl(t1
, t1
, 3);
2195 tcg_gen_andi_tl(t0
, t0
, ~3);
2196 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
2197 tcg_gen_shr_tl(t0
, t0
, t1
);
2198 tcg_gen_xori_tl(t1
, t1
, 31);
2199 t2
= tcg_const_tl(0xfffffffeull
);
2200 tcg_gen_shl_tl(t2
, t2
, t1
);
2201 gen_load_gpr(t1
, rt
);
2202 tcg_gen_and_tl(t1
, t1
, t2
);
2204 tcg_gen_or_tl(t0
, t0
, t1
);
2206 tcg_gen_ext32s_tl(t0
, t0
);
2207 gen_store_gpr(t0
, rt
);
2210 mem_idx
= MIPS_HFLAG_UM
;
2214 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
2215 gen_store_gpr(t0
, rt
);
2222 static void gen_st(DisasContext
*ctx
, uint32_t opc
, int rt
,
2223 int base
, int offset
)
2225 TCGv t0
= tcg_temp_new();
2226 TCGv t1
= tcg_temp_new();
2227 int mem_idx
= ctx
->mem_idx
;
2229 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2230 gen_load_gpr(t1
, rt
);
2232 #if defined(TARGET_MIPS64)
2234 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUQ
|
2235 ctx
->default_tcg_memop_mask
);
2238 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
2241 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
2245 mem_idx
= MIPS_HFLAG_UM
;
2248 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
2249 ctx
->default_tcg_memop_mask
);
2252 mem_idx
= MIPS_HFLAG_UM
;
2255 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
2256 ctx
->default_tcg_memop_mask
);
2259 mem_idx
= MIPS_HFLAG_UM
;
2262 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
2265 mem_idx
= MIPS_HFLAG_UM
;
2268 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
2271 mem_idx
= MIPS_HFLAG_UM
;
2274 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
2282 /* Store conditional */
2283 static void gen_st_cond(DisasContext
*ctx
, int rt
, int base
, int offset
,
2284 MemOp tcg_mo
, bool eva
)
2287 TCGLabel
*l1
= gen_new_label();
2288 TCGLabel
*done
= gen_new_label();
2290 t0
= tcg_temp_new();
2291 addr
= tcg_temp_new();
2292 /* compare the address against that of the preceding LL */
2293 gen_base_offset_addr(ctx
, addr
, base
, offset
);
2294 tcg_gen_brcond_tl(TCG_COND_EQ
, addr
, cpu_lladdr
, l1
);
2295 tcg_temp_free(addr
);
2296 tcg_gen_movi_tl(t0
, 0);
2297 gen_store_gpr(t0
, rt
);
2301 /* generate cmpxchg */
2302 val
= tcg_temp_new();
2303 gen_load_gpr(val
, rt
);
2304 tcg_gen_atomic_cmpxchg_tl(t0
, cpu_lladdr
, cpu_llval
, val
,
2305 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, tcg_mo
);
2306 tcg_gen_setcond_tl(TCG_COND_EQ
, t0
, t0
, cpu_llval
);
2307 gen_store_gpr(t0
, rt
);
2310 gen_set_label(done
);
2314 /* Load and store */
2315 static void gen_flt_ldst(DisasContext
*ctx
, uint32_t opc
, int ft
,
2319 * Don't do NOP if destination is zero: we must perform the actual
2325 TCGv_i32 fp0
= tcg_temp_new_i32();
2326 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2327 ctx
->default_tcg_memop_mask
);
2328 gen_store_fpr32(ctx
, fp0
, ft
);
2329 tcg_temp_free_i32(fp0
);
2334 TCGv_i32 fp0
= tcg_temp_new_i32();
2335 gen_load_fpr32(ctx
, fp0
, ft
);
2336 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2337 ctx
->default_tcg_memop_mask
);
2338 tcg_temp_free_i32(fp0
);
2343 TCGv_i64 fp0
= tcg_temp_new_i64();
2344 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
2345 ctx
->default_tcg_memop_mask
);
2346 gen_store_fpr64(ctx
, fp0
, ft
);
2347 tcg_temp_free_i64(fp0
);
2352 TCGv_i64 fp0
= tcg_temp_new_i64();
2353 gen_load_fpr64(ctx
, fp0
, ft
);
2354 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
2355 ctx
->default_tcg_memop_mask
);
2356 tcg_temp_free_i64(fp0
);
2360 MIPS_INVAL("flt_ldst");
2361 gen_reserved_instruction(ctx
);
2366 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
2367 int rs
, int16_t imm
)
2369 TCGv t0
= tcg_temp_new();
2371 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
2372 check_cp1_enabled(ctx
);
2376 check_insn(ctx
, ISA_MIPS2
);
2379 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
2380 gen_flt_ldst(ctx
, op
, rt
, t0
);
2383 generate_exception_err(ctx
, EXCP_CpU
, 1);
2388 /* Arithmetic with immediate operand */
2389 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2390 int rt
, int rs
, int imm
)
2392 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2394 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2396 * If no destination, treat it as a NOP.
2397 * For addi, we must generate the overflow exception when needed.
2404 TCGv t0
= tcg_temp_local_new();
2405 TCGv t1
= tcg_temp_new();
2406 TCGv t2
= tcg_temp_new();
2407 TCGLabel
*l1
= gen_new_label();
2409 gen_load_gpr(t1
, rs
);
2410 tcg_gen_addi_tl(t0
, t1
, uimm
);
2411 tcg_gen_ext32s_tl(t0
, t0
);
2413 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2414 tcg_gen_xori_tl(t2
, t0
, uimm
);
2415 tcg_gen_and_tl(t1
, t1
, t2
);
2417 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2419 /* operands of same sign, result different sign */
2420 generate_exception(ctx
, EXCP_OVERFLOW
);
2422 tcg_gen_ext32s_tl(t0
, t0
);
2423 gen_store_gpr(t0
, rt
);
2429 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2430 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2432 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2435 #if defined(TARGET_MIPS64)
2438 TCGv t0
= tcg_temp_local_new();
2439 TCGv t1
= tcg_temp_new();
2440 TCGv t2
= tcg_temp_new();
2441 TCGLabel
*l1
= gen_new_label();
2443 gen_load_gpr(t1
, rs
);
2444 tcg_gen_addi_tl(t0
, t1
, uimm
);
2446 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2447 tcg_gen_xori_tl(t2
, t0
, uimm
);
2448 tcg_gen_and_tl(t1
, t1
, t2
);
2450 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2452 /* operands of same sign, result different sign */
2453 generate_exception(ctx
, EXCP_OVERFLOW
);
2455 gen_store_gpr(t0
, rt
);
2461 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2463 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2470 /* Logic with immediate operand */
2471 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2472 int rt
, int rs
, int16_t imm
)
2477 /* If no destination, treat it as a NOP. */
2480 uimm
= (uint16_t)imm
;
2483 if (likely(rs
!= 0)) {
2484 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2486 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2491 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2493 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2497 if (likely(rs
!= 0)) {
2498 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2500 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2504 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS_R6
)) {
2506 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2507 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2509 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
2518 /* Set on less than with immediate operand */
2519 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
2520 int rt
, int rs
, int16_t imm
)
2522 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2526 /* If no destination, treat it as a NOP. */
2529 t0
= tcg_temp_new();
2530 gen_load_gpr(t0
, rs
);
2533 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
2536 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
2542 /* Shifts with immediate operand */
2543 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2544 int rt
, int rs
, int16_t imm
)
2546 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2550 /* If no destination, treat it as a NOP. */
2554 t0
= tcg_temp_new();
2555 gen_load_gpr(t0
, rs
);
2558 tcg_gen_shli_tl(t0
, t0
, uimm
);
2559 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2562 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2566 tcg_gen_ext32u_tl(t0
, t0
);
2567 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2569 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2574 TCGv_i32 t1
= tcg_temp_new_i32();
2576 tcg_gen_trunc_tl_i32(t1
, t0
);
2577 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2578 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2579 tcg_temp_free_i32(t1
);
2581 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2584 #if defined(TARGET_MIPS64)
2586 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2589 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2592 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2596 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2598 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2602 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2605 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2608 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2611 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2619 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2620 int rd
, int rs
, int rt
)
2622 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2623 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2625 * If no destination, treat it as a NOP.
2626 * For add & sub, we must generate the overflow exception when needed.
2634 TCGv t0
= tcg_temp_local_new();
2635 TCGv t1
= tcg_temp_new();
2636 TCGv t2
= tcg_temp_new();
2637 TCGLabel
*l1
= gen_new_label();
2639 gen_load_gpr(t1
, rs
);
2640 gen_load_gpr(t2
, rt
);
2641 tcg_gen_add_tl(t0
, t1
, t2
);
2642 tcg_gen_ext32s_tl(t0
, t0
);
2643 tcg_gen_xor_tl(t1
, t1
, t2
);
2644 tcg_gen_xor_tl(t2
, t0
, t2
);
2645 tcg_gen_andc_tl(t1
, t2
, t1
);
2647 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2649 /* operands of same sign, result different sign */
2650 generate_exception(ctx
, EXCP_OVERFLOW
);
2652 gen_store_gpr(t0
, rd
);
2657 if (rs
!= 0 && rt
!= 0) {
2658 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2659 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2660 } else if (rs
== 0 && rt
!= 0) {
2661 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2662 } else if (rs
!= 0 && rt
== 0) {
2663 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2665 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2670 TCGv t0
= tcg_temp_local_new();
2671 TCGv t1
= tcg_temp_new();
2672 TCGv t2
= tcg_temp_new();
2673 TCGLabel
*l1
= gen_new_label();
2675 gen_load_gpr(t1
, rs
);
2676 gen_load_gpr(t2
, rt
);
2677 tcg_gen_sub_tl(t0
, t1
, t2
);
2678 tcg_gen_ext32s_tl(t0
, t0
);
2679 tcg_gen_xor_tl(t2
, t1
, t2
);
2680 tcg_gen_xor_tl(t1
, t0
, t1
);
2681 tcg_gen_and_tl(t1
, t1
, t2
);
2683 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2686 * operands of different sign, first operand and the result
2689 generate_exception(ctx
, EXCP_OVERFLOW
);
2691 gen_store_gpr(t0
, rd
);
2696 if (rs
!= 0 && rt
!= 0) {
2697 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2698 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2699 } else if (rs
== 0 && rt
!= 0) {
2700 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2701 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2702 } else if (rs
!= 0 && rt
== 0) {
2703 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2705 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2708 #if defined(TARGET_MIPS64)
2711 TCGv t0
= tcg_temp_local_new();
2712 TCGv t1
= tcg_temp_new();
2713 TCGv t2
= tcg_temp_new();
2714 TCGLabel
*l1
= gen_new_label();
2716 gen_load_gpr(t1
, rs
);
2717 gen_load_gpr(t2
, rt
);
2718 tcg_gen_add_tl(t0
, t1
, t2
);
2719 tcg_gen_xor_tl(t1
, t1
, t2
);
2720 tcg_gen_xor_tl(t2
, t0
, t2
);
2721 tcg_gen_andc_tl(t1
, t2
, t1
);
2723 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2725 /* operands of same sign, result different sign */
2726 generate_exception(ctx
, EXCP_OVERFLOW
);
2728 gen_store_gpr(t0
, rd
);
2733 if (rs
!= 0 && rt
!= 0) {
2734 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2735 } else if (rs
== 0 && rt
!= 0) {
2736 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2737 } else if (rs
!= 0 && rt
== 0) {
2738 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2740 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2745 TCGv t0
= tcg_temp_local_new();
2746 TCGv t1
= tcg_temp_new();
2747 TCGv t2
= tcg_temp_new();
2748 TCGLabel
*l1
= gen_new_label();
2750 gen_load_gpr(t1
, rs
);
2751 gen_load_gpr(t2
, rt
);
2752 tcg_gen_sub_tl(t0
, t1
, t2
);
2753 tcg_gen_xor_tl(t2
, t1
, t2
);
2754 tcg_gen_xor_tl(t1
, t0
, t1
);
2755 tcg_gen_and_tl(t1
, t1
, t2
);
2757 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2760 * Operands of different sign, first operand and result different
2763 generate_exception(ctx
, EXCP_OVERFLOW
);
2765 gen_store_gpr(t0
, rd
);
2770 if (rs
!= 0 && rt
!= 0) {
2771 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2772 } else if (rs
== 0 && rt
!= 0) {
2773 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2774 } else if (rs
!= 0 && rt
== 0) {
2775 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2777 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2782 if (likely(rs
!= 0 && rt
!= 0)) {
2783 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2784 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2786 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2792 /* Conditional move */
2793 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
2794 int rd
, int rs
, int rt
)
2799 /* If no destination, treat it as a NOP. */
2803 t0
= tcg_temp_new();
2804 gen_load_gpr(t0
, rt
);
2805 t1
= tcg_const_tl(0);
2806 t2
= tcg_temp_new();
2807 gen_load_gpr(t2
, rs
);
2810 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2813 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
2816 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2819 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
2828 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2829 int rd
, int rs
, int rt
)
2832 /* If no destination, treat it as a NOP. */
2838 if (likely(rs
!= 0 && rt
!= 0)) {
2839 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2841 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2845 if (rs
!= 0 && rt
!= 0) {
2846 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2847 } else if (rs
== 0 && rt
!= 0) {
2848 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2849 } else if (rs
!= 0 && rt
== 0) {
2850 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2852 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2856 if (likely(rs
!= 0 && rt
!= 0)) {
2857 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2858 } else if (rs
== 0 && rt
!= 0) {
2859 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2860 } else if (rs
!= 0 && rt
== 0) {
2861 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2863 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2867 if (likely(rs
!= 0 && rt
!= 0)) {
2868 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2869 } else if (rs
== 0 && rt
!= 0) {
2870 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2871 } else if (rs
!= 0 && rt
== 0) {
2872 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2874 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2880 /* Set on lower than */
2881 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
2882 int rd
, int rs
, int rt
)
2887 /* If no destination, treat it as a NOP. */
2891 t0
= tcg_temp_new();
2892 t1
= tcg_temp_new();
2893 gen_load_gpr(t0
, rs
);
2894 gen_load_gpr(t1
, rt
);
2897 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
2900 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
2908 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2909 int rd
, int rs
, int rt
)
2915 * If no destination, treat it as a NOP.
2916 * For add & sub, we must generate the overflow exception when needed.
2921 t0
= tcg_temp_new();
2922 t1
= tcg_temp_new();
2923 gen_load_gpr(t0
, rs
);
2924 gen_load_gpr(t1
, rt
);
2927 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2928 tcg_gen_shl_tl(t0
, t1
, t0
);
2929 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2932 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2933 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2936 tcg_gen_ext32u_tl(t1
, t1
);
2937 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2938 tcg_gen_shr_tl(t0
, t1
, t0
);
2939 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2943 TCGv_i32 t2
= tcg_temp_new_i32();
2944 TCGv_i32 t3
= tcg_temp_new_i32();
2946 tcg_gen_trunc_tl_i32(t2
, t0
);
2947 tcg_gen_trunc_tl_i32(t3
, t1
);
2948 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2949 tcg_gen_rotr_i32(t2
, t3
, t2
);
2950 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2951 tcg_temp_free_i32(t2
);
2952 tcg_temp_free_i32(t3
);
2955 #if defined(TARGET_MIPS64)
2957 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2958 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2961 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2962 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2965 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2966 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
2969 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2970 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
2978 /* Arithmetic on HI/LO registers */
2979 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
2981 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
2992 #if defined(TARGET_MIPS64)
2994 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
2998 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3002 #if defined(TARGET_MIPS64)
3004 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3008 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3013 #if defined(TARGET_MIPS64)
3015 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3019 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3022 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3027 #if defined(TARGET_MIPS64)
3029 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3033 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3036 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3042 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3045 TCGv t0
= tcg_const_tl(addr
);
3046 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3047 gen_store_gpr(t0
, reg
);
3051 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3057 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3060 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3061 addr
= addr_add(ctx
, pc
, offset
);
3062 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3066 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3067 addr
= addr_add(ctx
, pc
, offset
);
3068 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3070 #if defined(TARGET_MIPS64)
3073 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3074 addr
= addr_add(ctx
, pc
, offset
);
3075 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3079 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3082 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3083 addr
= addr_add(ctx
, pc
, offset
);
3084 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3089 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3090 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3091 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3094 #if defined(TARGET_MIPS64)
3095 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3096 case R6_OPC_LDPC
+ (1 << 16):
3097 case R6_OPC_LDPC
+ (2 << 16):
3098 case R6_OPC_LDPC
+ (3 << 16):
3100 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3101 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3102 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUQ
);
3106 MIPS_INVAL("OPC_PCREL");
3107 gen_reserved_instruction(ctx
);
3114 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3123 t0
= tcg_temp_new();
3124 t1
= tcg_temp_new();
3126 gen_load_gpr(t0
, rs
);
3127 gen_load_gpr(t1
, rt
);
3132 TCGv t2
= tcg_temp_new();
3133 TCGv t3
= tcg_temp_new();
3134 tcg_gen_ext32s_tl(t0
, t0
);
3135 tcg_gen_ext32s_tl(t1
, t1
);
3136 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3137 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3138 tcg_gen_and_tl(t2
, t2
, t3
);
3139 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3140 tcg_gen_or_tl(t2
, t2
, t3
);
3141 tcg_gen_movi_tl(t3
, 0);
3142 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3143 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3144 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3151 TCGv t2
= tcg_temp_new();
3152 TCGv t3
= tcg_temp_new();
3153 tcg_gen_ext32s_tl(t0
, t0
);
3154 tcg_gen_ext32s_tl(t1
, t1
);
3155 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3156 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3157 tcg_gen_and_tl(t2
, t2
, t3
);
3158 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3159 tcg_gen_or_tl(t2
, t2
, t3
);
3160 tcg_gen_movi_tl(t3
, 0);
3161 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3162 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3163 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3170 TCGv t2
= tcg_const_tl(0);
3171 TCGv t3
= tcg_const_tl(1);
3172 tcg_gen_ext32u_tl(t0
, t0
);
3173 tcg_gen_ext32u_tl(t1
, t1
);
3174 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3175 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3176 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3183 TCGv t2
= tcg_const_tl(0);
3184 TCGv t3
= tcg_const_tl(1);
3185 tcg_gen_ext32u_tl(t0
, t0
);
3186 tcg_gen_ext32u_tl(t1
, t1
);
3187 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3188 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3189 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3196 TCGv_i32 t2
= tcg_temp_new_i32();
3197 TCGv_i32 t3
= tcg_temp_new_i32();
3198 tcg_gen_trunc_tl_i32(t2
, t0
);
3199 tcg_gen_trunc_tl_i32(t3
, t1
);
3200 tcg_gen_mul_i32(t2
, t2
, t3
);
3201 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3202 tcg_temp_free_i32(t2
);
3203 tcg_temp_free_i32(t3
);
3208 TCGv_i32 t2
= tcg_temp_new_i32();
3209 TCGv_i32 t3
= tcg_temp_new_i32();
3210 tcg_gen_trunc_tl_i32(t2
, t0
);
3211 tcg_gen_trunc_tl_i32(t3
, t1
);
3212 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3213 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3214 tcg_temp_free_i32(t2
);
3215 tcg_temp_free_i32(t3
);
3220 TCGv_i32 t2
= tcg_temp_new_i32();
3221 TCGv_i32 t3
= tcg_temp_new_i32();
3222 tcg_gen_trunc_tl_i32(t2
, t0
);
3223 tcg_gen_trunc_tl_i32(t3
, t1
);
3224 tcg_gen_mul_i32(t2
, t2
, t3
);
3225 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3226 tcg_temp_free_i32(t2
);
3227 tcg_temp_free_i32(t3
);
3232 TCGv_i32 t2
= tcg_temp_new_i32();
3233 TCGv_i32 t3
= tcg_temp_new_i32();
3234 tcg_gen_trunc_tl_i32(t2
, t0
);
3235 tcg_gen_trunc_tl_i32(t3
, t1
);
3236 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3237 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3238 tcg_temp_free_i32(t2
);
3239 tcg_temp_free_i32(t3
);
3242 #if defined(TARGET_MIPS64)
3245 TCGv t2
= tcg_temp_new();
3246 TCGv t3
= tcg_temp_new();
3247 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3248 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3249 tcg_gen_and_tl(t2
, t2
, t3
);
3250 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3251 tcg_gen_or_tl(t2
, t2
, t3
);
3252 tcg_gen_movi_tl(t3
, 0);
3253 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3254 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3261 TCGv t2
= tcg_temp_new();
3262 TCGv t3
= tcg_temp_new();
3263 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3264 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3265 tcg_gen_and_tl(t2
, t2
, t3
);
3266 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3267 tcg_gen_or_tl(t2
, t2
, t3
);
3268 tcg_gen_movi_tl(t3
, 0);
3269 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3270 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3277 TCGv t2
= tcg_const_tl(0);
3278 TCGv t3
= tcg_const_tl(1);
3279 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3280 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3287 TCGv t2
= tcg_const_tl(0);
3288 TCGv t3
= tcg_const_tl(1);
3289 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3290 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3296 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3300 TCGv t2
= tcg_temp_new();
3301 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3306 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3310 TCGv t2
= tcg_temp_new();
3311 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3317 MIPS_INVAL("r6 mul/div");
3318 gen_reserved_instruction(ctx
);
#if defined(TARGET_MIPS64)
/*
 * TX79 (R5900) pipeline-1 divide: DIV1/DIVU1 write quotient/remainder
 * to LO[1]/HI[1], sign-extended to 64 bits.  As in the R6 code, a
 * movcond neutralizes division by zero and INT_MIN / -1 at translation
 * time.
 */
static void gen_div1_tx79(DisasContext *ctx, uint32_t opc, int rs, int rt)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
    case MMI_OPC_DIV1:
        {
            TCGv t2 = tcg_temp_new();
            TCGv t3 = tcg_temp_new();
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_ext32s_tl(t1, t1);
            tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, INT_MIN);
            tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1);
            tcg_gen_and_tl(t2, t2, t3);
            tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
            tcg_gen_or_tl(t2, t2, t3);
            tcg_gen_movi_tl(t3, 0);
            tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
            tcg_gen_div_tl(cpu_LO[1], t0, t1);
            tcg_gen_rem_tl(cpu_HI[1], t0, t1);
            tcg_gen_ext32s_tl(cpu_LO[1], cpu_LO[1]);
            tcg_gen_ext32s_tl(cpu_HI[1], cpu_HI[1]);
            tcg_temp_free(t3);
            tcg_temp_free(t2);
        }
        break;
    case MMI_OPC_DIVU1:
        {
            TCGv t2 = tcg_const_tl(0);
            TCGv t3 = tcg_const_tl(1);
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
            tcg_gen_divu_tl(cpu_LO[1], t0, t1);
            tcg_gen_remu_tl(cpu_HI[1], t0, t1);
            tcg_gen_ext32s_tl(cpu_LO[1], cpu_LO[1]);
            tcg_gen_ext32s_tl(cpu_HI[1], cpu_HI[1]);
            tcg_temp_free(t3);
            tcg_temp_free(t2);
        }
        break;
    default:
        MIPS_INVAL("div1 TX79");
        gen_reserved_instruction(ctx);
        goto out;
    }
 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
#endif
3385 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3386 int acc
, int rs
, int rt
)
3390 t0
= tcg_temp_new();
3391 t1
= tcg_temp_new();
3393 gen_load_gpr(t0
, rs
);
3394 gen_load_gpr(t1
, rt
);
3403 TCGv t2
= tcg_temp_new();
3404 TCGv t3
= tcg_temp_new();
3405 tcg_gen_ext32s_tl(t0
, t0
);
3406 tcg_gen_ext32s_tl(t1
, t1
);
3407 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3408 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3409 tcg_gen_and_tl(t2
, t2
, t3
);
3410 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3411 tcg_gen_or_tl(t2
, t2
, t3
);
3412 tcg_gen_movi_tl(t3
, 0);
3413 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3414 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3415 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3416 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3417 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3424 TCGv t2
= tcg_const_tl(0);
3425 TCGv t3
= tcg_const_tl(1);
3426 tcg_gen_ext32u_tl(t0
, t0
);
3427 tcg_gen_ext32u_tl(t1
, t1
);
3428 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3429 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3430 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3431 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3432 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3439 TCGv_i32 t2
= tcg_temp_new_i32();
3440 TCGv_i32 t3
= tcg_temp_new_i32();
3441 tcg_gen_trunc_tl_i32(t2
, t0
);
3442 tcg_gen_trunc_tl_i32(t3
, t1
);
3443 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3444 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3445 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3446 tcg_temp_free_i32(t2
);
3447 tcg_temp_free_i32(t3
);
3452 TCGv_i32 t2
= tcg_temp_new_i32();
3453 TCGv_i32 t3
= tcg_temp_new_i32();
3454 tcg_gen_trunc_tl_i32(t2
, t0
);
3455 tcg_gen_trunc_tl_i32(t3
, t1
);
3456 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3457 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3458 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3459 tcg_temp_free_i32(t2
);
3460 tcg_temp_free_i32(t3
);
3463 #if defined(TARGET_MIPS64)
3466 TCGv t2
= tcg_temp_new();
3467 TCGv t3
= tcg_temp_new();
3468 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3469 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3470 tcg_gen_and_tl(t2
, t2
, t3
);
3471 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3472 tcg_gen_or_tl(t2
, t2
, t3
);
3473 tcg_gen_movi_tl(t3
, 0);
3474 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3475 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3476 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3483 TCGv t2
= tcg_const_tl(0);
3484 TCGv t3
= tcg_const_tl(1);
3485 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3486 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3487 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3493 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3496 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3501 TCGv_i64 t2
= tcg_temp_new_i64();
3502 TCGv_i64 t3
= tcg_temp_new_i64();
3504 tcg_gen_ext_tl_i64(t2
, t0
);
3505 tcg_gen_ext_tl_i64(t3
, t1
);
3506 tcg_gen_mul_i64(t2
, t2
, t3
);
3507 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3508 tcg_gen_add_i64(t2
, t2
, t3
);
3509 tcg_temp_free_i64(t3
);
3510 gen_move_low32(cpu_LO
[acc
], t2
);
3511 gen_move_high32(cpu_HI
[acc
], t2
);
3512 tcg_temp_free_i64(t2
);
3517 TCGv_i64 t2
= tcg_temp_new_i64();
3518 TCGv_i64 t3
= tcg_temp_new_i64();
3520 tcg_gen_ext32u_tl(t0
, t0
);
3521 tcg_gen_ext32u_tl(t1
, t1
);
3522 tcg_gen_extu_tl_i64(t2
, t0
);
3523 tcg_gen_extu_tl_i64(t3
, t1
);
3524 tcg_gen_mul_i64(t2
, t2
, t3
);
3525 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3526 tcg_gen_add_i64(t2
, t2
, t3
);
3527 tcg_temp_free_i64(t3
);
3528 gen_move_low32(cpu_LO
[acc
], t2
);
3529 gen_move_high32(cpu_HI
[acc
], t2
);
3530 tcg_temp_free_i64(t2
);
3535 TCGv_i64 t2
= tcg_temp_new_i64();
3536 TCGv_i64 t3
= tcg_temp_new_i64();
3538 tcg_gen_ext_tl_i64(t2
, t0
);
3539 tcg_gen_ext_tl_i64(t3
, t1
);
3540 tcg_gen_mul_i64(t2
, t2
, t3
);
3541 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3542 tcg_gen_sub_i64(t2
, t3
, t2
);
3543 tcg_temp_free_i64(t3
);
3544 gen_move_low32(cpu_LO
[acc
], t2
);
3545 gen_move_high32(cpu_HI
[acc
], t2
);
3546 tcg_temp_free_i64(t2
);
3551 TCGv_i64 t2
= tcg_temp_new_i64();
3552 TCGv_i64 t3
= tcg_temp_new_i64();
3554 tcg_gen_ext32u_tl(t0
, t0
);
3555 tcg_gen_ext32u_tl(t1
, t1
);
3556 tcg_gen_extu_tl_i64(t2
, t0
);
3557 tcg_gen_extu_tl_i64(t3
, t1
);
3558 tcg_gen_mul_i64(t2
, t2
, t3
);
3559 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3560 tcg_gen_sub_i64(t2
, t3
, t2
);
3561 tcg_temp_free_i64(t3
);
3562 gen_move_low32(cpu_LO
[acc
], t2
);
3563 gen_move_high32(cpu_HI
[acc
], t2
);
3564 tcg_temp_free_i64(t2
);
3568 MIPS_INVAL("mul/div");
3569 gen_reserved_instruction(ctx
);
 * These MULT[U] and MADD[U] instructions, implemented in, for example,
 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
 * architectures, are special three-operand variants with the syntax
 *     MULT[U][1] rd, rs, rt
 * such that
 *     (rd, LO, HI) <- rs * rt
 * and
 *     MADD[U][1] rd, rs, rt
 * such that
 *     (rd, LO, HI) <- (LO, HI) + rs * rt
 * where the low-order 32 bits of the result are placed into both the
 * GPR rd and the special register LO, and the high-order 32 bits of the
 * result are placed into the special register HI.
 * If the GPR rd is omitted in assembly language, it is taken to be 0,
 * which is the zero register that always reads as 0.
3603 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
3604 int rd
, int rs
, int rt
)
3606 TCGv t0
= tcg_temp_new();
3607 TCGv t1
= tcg_temp_new();
3610 gen_load_gpr(t0
, rs
);
3611 gen_load_gpr(t1
, rt
);
3619 TCGv_i32 t2
= tcg_temp_new_i32();
3620 TCGv_i32 t3
= tcg_temp_new_i32();
3621 tcg_gen_trunc_tl_i32(t2
, t0
);
3622 tcg_gen_trunc_tl_i32(t3
, t1
);
3623 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3625 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3627 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3628 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3629 tcg_temp_free_i32(t2
);
3630 tcg_temp_free_i32(t3
);
3633 case MMI_OPC_MULTU1
:
3638 TCGv_i32 t2
= tcg_temp_new_i32();
3639 TCGv_i32 t3
= tcg_temp_new_i32();
3640 tcg_gen_trunc_tl_i32(t2
, t0
);
3641 tcg_gen_trunc_tl_i32(t3
, t1
);
3642 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3644 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3646 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3647 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3648 tcg_temp_free_i32(t2
);
3649 tcg_temp_free_i32(t3
);
3657 TCGv_i64 t2
= tcg_temp_new_i64();
3658 TCGv_i64 t3
= tcg_temp_new_i64();
3660 tcg_gen_ext_tl_i64(t2
, t0
);
3661 tcg_gen_ext_tl_i64(t3
, t1
);
3662 tcg_gen_mul_i64(t2
, t2
, t3
);
3663 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3664 tcg_gen_add_i64(t2
, t2
, t3
);
3665 tcg_temp_free_i64(t3
);
3666 gen_move_low32(cpu_LO
[acc
], t2
);
3667 gen_move_high32(cpu_HI
[acc
], t2
);
3669 gen_move_low32(cpu_gpr
[rd
], t2
);
3671 tcg_temp_free_i64(t2
);
3674 case MMI_OPC_MADDU1
:
3679 TCGv_i64 t2
= tcg_temp_new_i64();
3680 TCGv_i64 t3
= tcg_temp_new_i64();
3682 tcg_gen_ext32u_tl(t0
, t0
);
3683 tcg_gen_ext32u_tl(t1
, t1
);
3684 tcg_gen_extu_tl_i64(t2
, t0
);
3685 tcg_gen_extu_tl_i64(t3
, t1
);
3686 tcg_gen_mul_i64(t2
, t2
, t3
);
3687 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3688 tcg_gen_add_i64(t2
, t2
, t3
);
3689 tcg_temp_free_i64(t3
);
3690 gen_move_low32(cpu_LO
[acc
], t2
);
3691 gen_move_high32(cpu_HI
[acc
], t2
);
3693 gen_move_low32(cpu_gpr
[rd
], t2
);
3695 tcg_temp_free_i64(t2
);
3699 MIPS_INVAL("mul/madd TXx9");
3700 gen_reserved_instruction(ctx
);
3709 static void gen_cl(DisasContext
*ctx
, uint32_t opc
,
3719 gen_load_gpr(t0
, rs
);
3724 #if defined(TARGET_MIPS64)
3728 tcg_gen_not_tl(t0
, t0
);
3737 tcg_gen_ext32u_tl(t0
, t0
);
3738 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
3739 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
3741 #if defined(TARGET_MIPS64)
3746 tcg_gen_clzi_i64(t0
, t0
, 64);
3752 /* Godson integer instructions */
3753 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3754 int rd
, int rs
, int rt
)
3766 case OPC_MULTU_G_2E
:
3767 case OPC_MULTU_G_2F
:
3768 #if defined(TARGET_MIPS64)
3769 case OPC_DMULT_G_2E
:
3770 case OPC_DMULT_G_2F
:
3771 case OPC_DMULTU_G_2E
:
3772 case OPC_DMULTU_G_2F
:
3774 t0
= tcg_temp_new();
3775 t1
= tcg_temp_new();
3778 t0
= tcg_temp_local_new();
3779 t1
= tcg_temp_local_new();
3783 gen_load_gpr(t0
, rs
);
3784 gen_load_gpr(t1
, rt
);
3789 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3790 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3792 case OPC_MULTU_G_2E
:
3793 case OPC_MULTU_G_2F
:
3794 tcg_gen_ext32u_tl(t0
, t0
);
3795 tcg_gen_ext32u_tl(t1
, t1
);
3796 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3797 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3802 TCGLabel
*l1
= gen_new_label();
3803 TCGLabel
*l2
= gen_new_label();
3804 TCGLabel
*l3
= gen_new_label();
3805 tcg_gen_ext32s_tl(t0
, t0
);
3806 tcg_gen_ext32s_tl(t1
, t1
);
3807 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3808 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3811 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3812 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3813 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3816 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3817 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3824 TCGLabel
*l1
= gen_new_label();
3825 TCGLabel
*l2
= gen_new_label();
3826 tcg_gen_ext32u_tl(t0
, t0
);
3827 tcg_gen_ext32u_tl(t1
, t1
);
3828 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3829 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3832 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3833 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3840 TCGLabel
*l1
= gen_new_label();
3841 TCGLabel
*l2
= gen_new_label();
3842 TCGLabel
*l3
= gen_new_label();
3843 tcg_gen_ext32u_tl(t0
, t0
);
3844 tcg_gen_ext32u_tl(t1
, t1
);
3845 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3846 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3847 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3849 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3852 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3853 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3860 TCGLabel
*l1
= gen_new_label();
3861 TCGLabel
*l2
= gen_new_label();
3862 tcg_gen_ext32u_tl(t0
, t0
);
3863 tcg_gen_ext32u_tl(t1
, t1
);
3864 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3865 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3868 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3869 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3873 #if defined(TARGET_MIPS64)
3874 case OPC_DMULT_G_2E
:
3875 case OPC_DMULT_G_2F
:
3876 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3878 case OPC_DMULTU_G_2E
:
3879 case OPC_DMULTU_G_2F
:
3880 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3885 TCGLabel
*l1
= gen_new_label();
3886 TCGLabel
*l2
= gen_new_label();
3887 TCGLabel
*l3
= gen_new_label();
3888 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3889 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3892 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3893 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3894 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3897 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3901 case OPC_DDIVU_G_2E
:
3902 case OPC_DDIVU_G_2F
:
3904 TCGLabel
*l1
= gen_new_label();
3905 TCGLabel
*l2
= gen_new_label();
3906 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3907 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3910 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3917 TCGLabel
*l1
= gen_new_label();
3918 TCGLabel
*l2
= gen_new_label();
3919 TCGLabel
*l3
= gen_new_label();
3920 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3921 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3922 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3924 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3927 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3931 case OPC_DMODU_G_2E
:
3932 case OPC_DMODU_G_2F
:
3934 TCGLabel
*l1
= gen_new_label();
3935 TCGLabel
*l2
= gen_new_label();
3936 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3937 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3940 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3951 /* Loongson multimedia instructions */
3952 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3954 uint32_t opc
, shift_max
;
3958 opc
= MASK_LMMI(ctx
->opcode
);
3964 t0
= tcg_temp_local_new_i64();
3965 t1
= tcg_temp_local_new_i64();
3968 t0
= tcg_temp_new_i64();
3969 t1
= tcg_temp_new_i64();
3973 check_cp1_enabled(ctx
);
3974 gen_load_fpr64(ctx
, t0
, rs
);
3975 gen_load_fpr64(ctx
, t1
, rt
);
3979 gen_helper_paddsh(t0
, t0
, t1
);
3982 gen_helper_paddush(t0
, t0
, t1
);
3985 gen_helper_paddh(t0
, t0
, t1
);
3988 gen_helper_paddw(t0
, t0
, t1
);
3991 gen_helper_paddsb(t0
, t0
, t1
);
3994 gen_helper_paddusb(t0
, t0
, t1
);
3997 gen_helper_paddb(t0
, t0
, t1
);
4001 gen_helper_psubsh(t0
, t0
, t1
);
4004 gen_helper_psubush(t0
, t0
, t1
);
4007 gen_helper_psubh(t0
, t0
, t1
);
4010 gen_helper_psubw(t0
, t0
, t1
);
4013 gen_helper_psubsb(t0
, t0
, t1
);
4016 gen_helper_psubusb(t0
, t0
, t1
);
4019 gen_helper_psubb(t0
, t0
, t1
);
4023 gen_helper_pshufh(t0
, t0
, t1
);
4026 gen_helper_packsswh(t0
, t0
, t1
);
4029 gen_helper_packsshb(t0
, t0
, t1
);
4032 gen_helper_packushb(t0
, t0
, t1
);
4036 gen_helper_punpcklhw(t0
, t0
, t1
);
4039 gen_helper_punpckhhw(t0
, t0
, t1
);
4042 gen_helper_punpcklbh(t0
, t0
, t1
);
4045 gen_helper_punpckhbh(t0
, t0
, t1
);
4048 gen_helper_punpcklwd(t0
, t0
, t1
);
4051 gen_helper_punpckhwd(t0
, t0
, t1
);
4055 gen_helper_pavgh(t0
, t0
, t1
);
4058 gen_helper_pavgb(t0
, t0
, t1
);
4061 gen_helper_pmaxsh(t0
, t0
, t1
);
4064 gen_helper_pminsh(t0
, t0
, t1
);
4067 gen_helper_pmaxub(t0
, t0
, t1
);
4070 gen_helper_pminub(t0
, t0
, t1
);
4074 gen_helper_pcmpeqw(t0
, t0
, t1
);
4077 gen_helper_pcmpgtw(t0
, t0
, t1
);
4080 gen_helper_pcmpeqh(t0
, t0
, t1
);
4083 gen_helper_pcmpgth(t0
, t0
, t1
);
4086 gen_helper_pcmpeqb(t0
, t0
, t1
);
4089 gen_helper_pcmpgtb(t0
, t0
, t1
);
4093 gen_helper_psllw(t0
, t0
, t1
);
4096 gen_helper_psllh(t0
, t0
, t1
);
4099 gen_helper_psrlw(t0
, t0
, t1
);
4102 gen_helper_psrlh(t0
, t0
, t1
);
4105 gen_helper_psraw(t0
, t0
, t1
);
4108 gen_helper_psrah(t0
, t0
, t1
);
4112 gen_helper_pmullh(t0
, t0
, t1
);
4115 gen_helper_pmulhh(t0
, t0
, t1
);
4118 gen_helper_pmulhuh(t0
, t0
, t1
);
4121 gen_helper_pmaddhw(t0
, t0
, t1
);
4125 gen_helper_pasubub(t0
, t0
, t1
);
4128 gen_helper_biadd(t0
, t0
);
4131 gen_helper_pmovmskb(t0
, t0
);
4135 tcg_gen_add_i64(t0
, t0
, t1
);
4138 tcg_gen_sub_i64(t0
, t0
, t1
);
4141 tcg_gen_xor_i64(t0
, t0
, t1
);
4144 tcg_gen_nor_i64(t0
, t0
, t1
);
4147 tcg_gen_and_i64(t0
, t0
, t1
);
4150 tcg_gen_or_i64(t0
, t0
, t1
);
4154 tcg_gen_andc_i64(t0
, t1
, t0
);
4158 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
4161 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
4164 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
4167 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
4171 tcg_gen_andi_i64(t1
, t1
, 3);
4172 tcg_gen_shli_i64(t1
, t1
, 4);
4173 tcg_gen_shr_i64(t0
, t0
, t1
);
4174 tcg_gen_ext16u_i64(t0
, t0
);
4178 tcg_gen_add_i64(t0
, t0
, t1
);
4179 tcg_gen_ext32s_i64(t0
, t0
);
4182 tcg_gen_sub_i64(t0
, t0
, t1
);
4183 tcg_gen_ext32s_i64(t0
, t0
);
4205 /* Make sure shift count isn't TCG undefined behaviour. */
4206 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
4211 tcg_gen_shl_i64(t0
, t0
, t1
);
4216 * Since SRA is UndefinedResult without sign-extended inputs,
4217 * we can treat SRA and DSRA the same.
4219 tcg_gen_sar_i64(t0
, t0
, t1
);
4222 /* We want to shift in zeros for SRL; zero-extend first. */
4223 tcg_gen_ext32u_i64(t0
, t0
);
4226 tcg_gen_shr_i64(t0
, t0
, t1
);
4230 if (shift_max
== 32) {
4231 tcg_gen_ext32s_i64(t0
, t0
);
4234 /* Shifts larger than MAX produce zero. */
4235 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
4236 tcg_gen_neg_i64(t1
, t1
);
4237 tcg_gen_and_i64(t0
, t0
, t1
);
4243 TCGv_i64 t2
= tcg_temp_new_i64();
4244 TCGLabel
*lab
= gen_new_label();
4246 tcg_gen_mov_i64(t2
, t0
);
4247 tcg_gen_add_i64(t0
, t1
, t2
);
4248 if (opc
== OPC_ADD_CP2
) {
4249 tcg_gen_ext32s_i64(t0
, t0
);
4251 tcg_gen_xor_i64(t1
, t1
, t2
);
4252 tcg_gen_xor_i64(t2
, t2
, t0
);
4253 tcg_gen_andc_i64(t1
, t2
, t1
);
4254 tcg_temp_free_i64(t2
);
4255 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4256 generate_exception(ctx
, EXCP_OVERFLOW
);
4264 TCGv_i64 t2
= tcg_temp_new_i64();
4265 TCGLabel
*lab
= gen_new_label();
4267 tcg_gen_mov_i64(t2
, t0
);
4268 tcg_gen_sub_i64(t0
, t1
, t2
);
4269 if (opc
== OPC_SUB_CP2
) {
4270 tcg_gen_ext32s_i64(t0
, t0
);
4272 tcg_gen_xor_i64(t1
, t1
, t2
);
4273 tcg_gen_xor_i64(t2
, t2
, t0
);
4274 tcg_gen_and_i64(t1
, t1
, t2
);
4275 tcg_temp_free_i64(t2
);
4276 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
4277 generate_exception(ctx
, EXCP_OVERFLOW
);
4283 tcg_gen_ext32u_i64(t0
, t0
);
4284 tcg_gen_ext32u_i64(t1
, t1
);
4285 tcg_gen_mul_i64(t0
, t0
, t1
);
4294 cond
= TCG_COND_LTU
;
4302 cond
= TCG_COND_LEU
;
4309 int cc
= (ctx
->opcode
>> 8) & 0x7;
4310 TCGv_i64 t64
= tcg_temp_new_i64();
4311 TCGv_i32 t32
= tcg_temp_new_i32();
4313 tcg_gen_setcond_i64(cond
, t64
, t0
, t1
);
4314 tcg_gen_extrl_i64_i32(t32
, t64
);
4315 tcg_gen_deposit_i32(fpu_fcr31
, fpu_fcr31
, t32
,
4318 tcg_temp_free_i32(t32
);
4319 tcg_temp_free_i64(t64
);
4324 MIPS_INVAL("loongson_cp2");
4325 gen_reserved_instruction(ctx
);
4329 gen_store_fpr64(ctx
, t0
, rd
);
4332 tcg_temp_free_i64(t0
);
4333 tcg_temp_free_i64(t1
);
4336 static void gen_loongson_lswc2(DisasContext
*ctx
, int rt
,
4341 #if defined(TARGET_MIPS64)
4342 int lsq_rt1
= ctx
->opcode
& 0x1f;
4343 int lsq_offset
= sextract32(ctx
->opcode
, 6, 9) << 4;
4345 int shf_offset
= sextract32(ctx
->opcode
, 6, 8);
4347 t0
= tcg_temp_new();
4349 switch (MASK_LOONGSON_GSLSQ(ctx
->opcode
)) {
4350 #if defined(TARGET_MIPS64)
4352 t1
= tcg_temp_new();
4353 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4354 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4355 ctx
->default_tcg_memop_mask
);
4356 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4357 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4358 ctx
->default_tcg_memop_mask
);
4359 gen_store_gpr(t1
, rt
);
4360 gen_store_gpr(t0
, lsq_rt1
);
4364 check_cp1_enabled(ctx
);
4365 t1
= tcg_temp_new();
4366 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4367 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4368 ctx
->default_tcg_memop_mask
);
4369 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4370 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4371 ctx
->default_tcg_memop_mask
);
4372 gen_store_fpr64(ctx
, t1
, rt
);
4373 gen_store_fpr64(ctx
, t0
, lsq_rt1
);
4377 t1
= tcg_temp_new();
4378 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4379 gen_load_gpr(t1
, rt
);
4380 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4381 ctx
->default_tcg_memop_mask
);
4382 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4383 gen_load_gpr(t1
, lsq_rt1
);
4384 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4385 ctx
->default_tcg_memop_mask
);
4389 check_cp1_enabled(ctx
);
4390 t1
= tcg_temp_new();
4391 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
4392 gen_load_fpr64(ctx
, t1
, rt
);
4393 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4394 ctx
->default_tcg_memop_mask
);
4395 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
4396 gen_load_fpr64(ctx
, t1
, lsq_rt1
);
4397 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4398 ctx
->default_tcg_memop_mask
);
4403 switch (MASK_LOONGSON_GSSHFLS(ctx
->opcode
)) {
4405 check_cp1_enabled(ctx
);
4406 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4407 t1
= tcg_temp_new();
4408 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4409 tcg_gen_andi_tl(t1
, t0
, 3);
4410 if (!cpu_is_bigendian(ctx
)) {
4411 tcg_gen_xori_tl(t1
, t1
, 3);
4413 tcg_gen_shli_tl(t1
, t1
, 3);
4414 tcg_gen_andi_tl(t0
, t0
, ~3);
4415 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
4416 tcg_gen_shl_tl(t0
, t0
, t1
);
4417 t2
= tcg_const_tl(-1);
4418 tcg_gen_shl_tl(t2
, t2
, t1
);
4419 fp0
= tcg_temp_new_i32();
4420 gen_load_fpr32(ctx
, fp0
, rt
);
4421 tcg_gen_ext_i32_tl(t1
, fp0
);
4422 tcg_gen_andc_tl(t1
, t1
, t2
);
4424 tcg_gen_or_tl(t0
, t0
, t1
);
4426 #if defined(TARGET_MIPS64)
4427 tcg_gen_extrl_i64_i32(fp0
, t0
);
4429 tcg_gen_ext32s_tl(fp0
, t0
);
4431 gen_store_fpr32(ctx
, fp0
, rt
);
4432 tcg_temp_free_i32(fp0
);
4435 check_cp1_enabled(ctx
);
4436 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4437 t1
= tcg_temp_new();
4438 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4439 tcg_gen_andi_tl(t1
, t0
, 3);
4440 if (cpu_is_bigendian(ctx
)) {
4441 tcg_gen_xori_tl(t1
, t1
, 3);
4443 tcg_gen_shli_tl(t1
, t1
, 3);
4444 tcg_gen_andi_tl(t0
, t0
, ~3);
4445 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
4446 tcg_gen_shr_tl(t0
, t0
, t1
);
4447 tcg_gen_xori_tl(t1
, t1
, 31);
4448 t2
= tcg_const_tl(0xfffffffeull
);
4449 tcg_gen_shl_tl(t2
, t2
, t1
);
4450 fp0
= tcg_temp_new_i32();
4451 gen_load_fpr32(ctx
, fp0
, rt
);
4452 tcg_gen_ext_i32_tl(t1
, fp0
);
4453 tcg_gen_and_tl(t1
, t1
, t2
);
4455 tcg_gen_or_tl(t0
, t0
, t1
);
4457 #if defined(TARGET_MIPS64)
4458 tcg_gen_extrl_i64_i32(fp0
, t0
);
4460 tcg_gen_ext32s_tl(fp0
, t0
);
4462 gen_store_fpr32(ctx
, fp0
, rt
);
4463 tcg_temp_free_i32(fp0
);
4465 #if defined(TARGET_MIPS64)
4467 check_cp1_enabled(ctx
);
4468 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4469 t1
= tcg_temp_new();
4470 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4471 tcg_gen_andi_tl(t1
, t0
, 7);
4472 if (!cpu_is_bigendian(ctx
)) {
4473 tcg_gen_xori_tl(t1
, t1
, 7);
4475 tcg_gen_shli_tl(t1
, t1
, 3);
4476 tcg_gen_andi_tl(t0
, t0
, ~7);
4477 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
4478 tcg_gen_shl_tl(t0
, t0
, t1
);
4479 t2
= tcg_const_tl(-1);
4480 tcg_gen_shl_tl(t2
, t2
, t1
);
4481 gen_load_fpr64(ctx
, t1
, rt
);
4482 tcg_gen_andc_tl(t1
, t1
, t2
);
4484 tcg_gen_or_tl(t0
, t0
, t1
);
4486 gen_store_fpr64(ctx
, t0
, rt
);
4489 check_cp1_enabled(ctx
);
4490 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4491 t1
= tcg_temp_new();
4492 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
4493 tcg_gen_andi_tl(t1
, t0
, 7);
4494 if (cpu_is_bigendian(ctx
)) {
4495 tcg_gen_xori_tl(t1
, t1
, 7);
4497 tcg_gen_shli_tl(t1
, t1
, 3);
4498 tcg_gen_andi_tl(t0
, t0
, ~7);
4499 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
4500 tcg_gen_shr_tl(t0
, t0
, t1
);
4501 tcg_gen_xori_tl(t1
, t1
, 63);
4502 t2
= tcg_const_tl(0xfffffffffffffffeull
);
4503 tcg_gen_shl_tl(t2
, t2
, t1
);
4504 gen_load_fpr64(ctx
, t1
, rt
);
4505 tcg_gen_and_tl(t1
, t1
, t2
);
4507 tcg_gen_or_tl(t0
, t0
, t1
);
4509 gen_store_fpr64(ctx
, t0
, rt
);
4513 MIPS_INVAL("loongson_gsshfl");
4514 gen_reserved_instruction(ctx
);
4519 switch (MASK_LOONGSON_GSSHFLS(ctx
->opcode
)) {
4521 check_cp1_enabled(ctx
);
4522 t1
= tcg_temp_new();
4523 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4524 fp0
= tcg_temp_new_i32();
4525 gen_load_fpr32(ctx
, fp0
, rt
);
4526 tcg_gen_ext_i32_tl(t1
, fp0
);
4527 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
4528 tcg_temp_free_i32(fp0
);
4532 check_cp1_enabled(ctx
);
4533 t1
= tcg_temp_new();
4534 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4535 fp0
= tcg_temp_new_i32();
4536 gen_load_fpr32(ctx
, fp0
, rt
);
4537 tcg_gen_ext_i32_tl(t1
, fp0
);
4538 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
4539 tcg_temp_free_i32(fp0
);
4542 #if defined(TARGET_MIPS64)
4544 check_cp1_enabled(ctx
);
4545 t1
= tcg_temp_new();
4546 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4547 gen_load_fpr64(ctx
, t1
, rt
);
4548 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
4552 check_cp1_enabled(ctx
);
4553 t1
= tcg_temp_new();
4554 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
4555 gen_load_fpr64(ctx
, t1
, rt
);
4556 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
4561 MIPS_INVAL("loongson_gsshfs");
4562 gen_reserved_instruction(ctx
);
4567 MIPS_INVAL("loongson_gslsq");
4568 gen_reserved_instruction(ctx
);
4574 /* Loongson EXT LDC2/SDC2 */
4575 static void gen_loongson_lsdc2(DisasContext
*ctx
, int rt
,
4578 int offset
= sextract32(ctx
->opcode
, 3, 8);
4579 uint32_t opc
= MASK_LOONGSON_LSDC2(ctx
->opcode
);
4583 /* Pre-conditions */
4589 /* prefetch, implement as NOP */
4600 #if defined(TARGET_MIPS64)
4603 check_cp1_enabled(ctx
);
4604 /* prefetch, implement as NOP */
4610 #if defined(TARGET_MIPS64)
4613 check_cp1_enabled(ctx
);
4616 MIPS_INVAL("loongson_lsdc2");
4617 gen_reserved_instruction(ctx
);
4622 t0
= tcg_temp_new();
4624 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4625 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4629 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
4630 gen_store_gpr(t0
, rt
);
4633 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
4634 ctx
->default_tcg_memop_mask
);
4635 gen_store_gpr(t0
, rt
);
4638 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4640 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4642 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
4643 ctx
->default_tcg_memop_mask
);
4644 gen_store_gpr(t0
, rt
);
4646 #if defined(TARGET_MIPS64)
4648 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4650 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4652 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4653 ctx
->default_tcg_memop_mask
);
4654 gen_store_gpr(t0
, rt
);
4658 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4660 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4662 fp0
= tcg_temp_new_i32();
4663 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
4664 ctx
->default_tcg_memop_mask
);
4665 gen_store_fpr32(ctx
, fp0
, rt
);
4666 tcg_temp_free_i32(fp0
);
4668 #if defined(TARGET_MIPS64)
4670 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
4672 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
4674 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4675 ctx
->default_tcg_memop_mask
);
4676 gen_store_fpr64(ctx
, t0
, rt
);
4680 t1
= tcg_temp_new();
4681 gen_load_gpr(t1
, rt
);
4682 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
4686 t1
= tcg_temp_new();
4687 gen_load_gpr(t1
, rt
);
4688 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
4689 ctx
->default_tcg_memop_mask
);
4693 t1
= tcg_temp_new();
4694 gen_load_gpr(t1
, rt
);
4695 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
4696 ctx
->default_tcg_memop_mask
);
4699 #if defined(TARGET_MIPS64)
4701 t1
= tcg_temp_new();
4702 gen_load_gpr(t1
, rt
);
4703 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4704 ctx
->default_tcg_memop_mask
);
4709 fp0
= tcg_temp_new_i32();
4710 gen_load_fpr32(ctx
, fp0
, rt
);
4711 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
4712 ctx
->default_tcg_memop_mask
);
4713 tcg_temp_free_i32(fp0
);
4715 #if defined(TARGET_MIPS64)
4717 t1
= tcg_temp_new();
4718 gen_load_fpr64(ctx
, t1
, rt
);
4719 tcg_gen_qemu_st_i64(t1
, t0
, ctx
->mem_idx
, MO_TEUQ
|
4720 ctx
->default_tcg_memop_mask
);
4732 static void gen_trap(DisasContext
*ctx
, uint32_t opc
,
4733 int rs
, int rt
, int16_t imm
, int code
)
4736 TCGv t0
= tcg_temp_new();
4737 TCGv t1
= tcg_temp_new();
4740 /* Load needed operands */
4748 /* Compare two registers */
4750 gen_load_gpr(t0
, rs
);
4751 gen_load_gpr(t1
, rt
);
4761 /* Compare register to immediate */
4762 if (rs
!= 0 || imm
!= 0) {
4763 gen_load_gpr(t0
, rs
);
4764 tcg_gen_movi_tl(t1
, (int32_t)imm
);
4771 case OPC_TEQ
: /* rs == rs */
4772 case OPC_TEQI
: /* r0 == 0 */
4773 case OPC_TGE
: /* rs >= rs */
4774 case OPC_TGEI
: /* r0 >= 0 */
4775 case OPC_TGEU
: /* rs >= rs unsigned */
4776 case OPC_TGEIU
: /* r0 >= 0 unsigned */
4778 #ifdef CONFIG_USER_ONLY
4779 /* Pass the break code along to cpu_loop. */
4780 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
4781 offsetof(CPUMIPSState
, error_code
));
4783 generate_exception_end(ctx
, EXCP_TRAP
);
4785 case OPC_TLT
: /* rs < rs */
4786 case OPC_TLTI
: /* r0 < 0 */
4787 case OPC_TLTU
: /* rs < rs unsigned */
4788 case OPC_TLTIU
: /* r0 < 0 unsigned */
4789 case OPC_TNE
: /* rs != rs */
4790 case OPC_TNEI
: /* r0 != 0 */
4791 /* Never trap: treat as NOP. */
4795 TCGLabel
*l1
= gen_new_label();
4800 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
4804 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
4808 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
4812 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4816 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
4820 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
4823 #ifdef CONFIG_USER_ONLY
4824 /* Pass the break code along to cpu_loop. */
4825 tcg_gen_st_i32(tcg_constant_i32(code
), cpu_env
,
4826 offsetof(CPUMIPSState
, error_code
));
4828 /* Like save_cpu_state, only don't update saved values. */
4829 if (ctx
->base
.pc_next
!= ctx
->saved_pc
) {
4830 gen_save_pc(ctx
->base
.pc_next
);
4832 if (ctx
->hflags
!= ctx
->saved_hflags
) {
4833 tcg_gen_movi_i32(hflags
, ctx
->hflags
);
4835 generate_exception(ctx
, EXCP_TRAP
);
4842 static void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
4844 if (translator_use_goto_tb(&ctx
->base
, dest
)) {
4847 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
4850 tcg_gen_lookup_and_goto_ptr();
4854 /* Branches (before delay slot) */
4855 static void gen_compute_branch(DisasContext
*ctx
, uint32_t opc
,
4857 int rs
, int rt
, int32_t offset
,
4860 target_ulong btgt
= -1;
4862 int bcond_compute
= 0;
4863 TCGv t0
= tcg_temp_new();
4864 TCGv t1
= tcg_temp_new();
4866 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
4867 #ifdef MIPS_DEBUG_DISAS
4868 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4869 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
4871 gen_reserved_instruction(ctx
);
4875 /* Load needed operands */
4881 /* Compare two registers */
4883 gen_load_gpr(t0
, rs
);
4884 gen_load_gpr(t1
, rt
);
4887 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4901 /* Compare to zero */
4903 gen_load_gpr(t0
, rs
);
4906 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4909 #if defined(TARGET_MIPS64)
4911 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
4913 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
4916 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
4921 /* Jump to immediate */
4922 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
4927 /* Jump to register */
4928 if (offset
!= 0 && offset
!= 16) {
4930 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4931 * others are reserved.
4933 MIPS_INVAL("jump hint");
4934 gen_reserved_instruction(ctx
);
4937 gen_load_gpr(btarget
, rs
);
4940 MIPS_INVAL("branch/jump");
4941 gen_reserved_instruction(ctx
);
4944 if (bcond_compute
== 0) {
4945 /* No condition to be computed */
4947 case OPC_BEQ
: /* rx == rx */
4948 case OPC_BEQL
: /* rx == rx likely */
4949 case OPC_BGEZ
: /* 0 >= 0 */
4950 case OPC_BGEZL
: /* 0 >= 0 likely */
4951 case OPC_BLEZ
: /* 0 <= 0 */
4952 case OPC_BLEZL
: /* 0 <= 0 likely */
4954 ctx
->hflags
|= MIPS_HFLAG_B
;
4956 case OPC_BGEZAL
: /* 0 >= 0 */
4957 case OPC_BGEZALL
: /* 0 >= 0 likely */
4958 /* Always take and link */
4960 ctx
->hflags
|= MIPS_HFLAG_B
;
4962 case OPC_BNE
: /* rx != rx */
4963 case OPC_BGTZ
: /* 0 > 0 */
4964 case OPC_BLTZ
: /* 0 < 0 */
4967 case OPC_BLTZAL
: /* 0 < 0 */
4969 * Handle as an unconditional branch to get correct delay
4973 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
4974 ctx
->hflags
|= MIPS_HFLAG_B
;
4976 case OPC_BLTZALL
: /* 0 < 0 likely */
4977 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
4978 /* Skip the instruction in the delay slot */
4979 ctx
->base
.pc_next
+= 4;
4981 case OPC_BNEL
: /* rx != rx likely */
4982 case OPC_BGTZL
: /* 0 > 0 likely */
4983 case OPC_BLTZL
: /* 0 < 0 likely */
4984 /* Skip the instruction in the delay slot */
4985 ctx
->base
.pc_next
+= 4;
4988 ctx
->hflags
|= MIPS_HFLAG_B
;
4991 ctx
->hflags
|= MIPS_HFLAG_BX
;
4995 ctx
->hflags
|= MIPS_HFLAG_B
;
4998 ctx
->hflags
|= MIPS_HFLAG_BR
;
5002 ctx
->hflags
|= MIPS_HFLAG_BR
;
5005 MIPS_INVAL("branch/jump");
5006 gen_reserved_instruction(ctx
);
5012 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5015 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
5018 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5021 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
5024 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5027 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5030 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5034 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
5038 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5041 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
5044 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5047 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
5050 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5053 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5056 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
5058 #if defined(TARGET_MIPS64)
5060 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
5064 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5067 ctx
->hflags
|= MIPS_HFLAG_BC
;
5070 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
5073 ctx
->hflags
|= MIPS_HFLAG_BL
;
5076 MIPS_INVAL("conditional branch/jump");
5077 gen_reserved_instruction(ctx
);
5082 ctx
->btarget
= btgt
;
5084 switch (delayslot_size
) {
5086 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
5089 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
5094 int post_delay
= insn_bytes
+ delayslot_size
;
5095 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
5097 tcg_gen_movi_tl(cpu_gpr
[blink
],
5098 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
5102 if (insn_bytes
== 2) {
5103 ctx
->hflags
|= MIPS_HFLAG_B16
;
5110 /* special3 bitfield operations */
5111 static void gen_bitops(DisasContext
*ctx
, uint32_t opc
, int rt
,
5112 int rs
, int lsb
, int msb
)
5114 TCGv t0
= tcg_temp_new();
5115 TCGv t1
= tcg_temp_new();
5117 gen_load_gpr(t1
, rs
);
5120 if (lsb
+ msb
> 31) {
5124 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5127 * The two checks together imply that lsb == 0,
5128 * so this is a simple sign-extension.
5130 tcg_gen_ext32s_tl(t0
, t1
);
5133 #if defined(TARGET_MIPS64)
5142 if (lsb
+ msb
> 63) {
5145 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
5152 gen_load_gpr(t0
, rt
);
5153 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5154 tcg_gen_ext32s_tl(t0
, t0
);
5156 #if defined(TARGET_MIPS64)
5167 gen_load_gpr(t0
, rt
);
5168 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
5173 MIPS_INVAL("bitops");
5174 gen_reserved_instruction(ctx
);
5179 gen_store_gpr(t0
, rt
);
static void gen_bshfl(DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    TCGv t0;

    /* If no destination, treat it as a NOP. */
    if (rd == 0) {
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (op2) {
    case OPC_WSBH:
        /* Swap the two bytes within each halfword of the low 32 bits. */
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
            /* Result is a 32-bit value: sign-extend into the destination. */
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        }
        break;
    case OPC_SEB:
        /* Sign-extend byte. */
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        break;
    case OPC_SEH:
        /* Sign-extend halfword. */
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSBH:
        /* Swap the two bytes within each halfword across all 64 bits. */
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);

            tcg_gen_shri_tl(t1, t0, 8);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 8);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
    case OPC_DSHD:
        /*
         * Swap the halfwords of the doubleword: first exchange halfwords
         * within each word, then exchange the two words.
         */
        {
            TCGv t1 = tcg_temp_new();
            TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);

            tcg_gen_shri_tl(t1, t0, 16);
            tcg_gen_and_tl(t1, t1, t2);
            tcg_gen_and_tl(t0, t0, t2);
            tcg_gen_shli_tl(t0, t0, 16);
            tcg_gen_or_tl(t0, t0, t1);
            tcg_gen_shri_tl(t1, t0, 32);
            tcg_gen_shli_tl(t0, t0, 32);
            tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
            tcg_temp_free(t2);
            tcg_temp_free(t1);
        }
        break;
#endif
    default:
        MIPS_INVAL("bsfhl");
        gen_reserved_instruction(ctx);
        return;
    }
    tcg_temp_free(t0);
}
/*
 * Shared worker for ALIGN/DALIGN: extract a @wordsz-bit field that is
 * offset by @bits bits from the concatenation of rs:rt, into rd.
 * @bits == 0 or @bits == @wordsz degenerate to a plain register move.
 */
static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
                           int rt, int bits)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    if (bits == 0 || bits == wordsz) {
        /* Degenerate case: copy one source register unchanged. */
        if (bits == 0) {
            gen_load_gpr(t0, rt);
        } else {
            gen_load_gpr(t0, rs);
        }
        switch (wordsz) {
        case 32:
            tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
            break;
#if defined(TARGET_MIPS64)
        case 64:
            tcg_gen_mov_tl(cpu_gpr[rd], t0);
            break;
#endif
        }
    } else {
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        switch (wordsz) {
        case 32:
            {
                /* Concatenate rs:rt into 64 bits and shift out the offset. */
                TCGv_i64 t2 = tcg_temp_new_i64();
                tcg_gen_concat_tl_i64(t2, t1, t0);
                tcg_gen_shri_i64(t2, t2, 32 - bits);
                gen_move_low32(cpu_gpr[rd], t2);
                tcg_temp_free_i64(t2);
            }
            break;
#if defined(TARGET_MIPS64)
        case 64:
            /* Combine the high part of rt with the low part of rs. */
            tcg_gen_shli_tl(t0, t0, bits);
            tcg_gen_shri_tl(t1, t1, 64 - bits);
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
            break;
#endif
        }
        tcg_temp_free(t1);
    }
    tcg_temp_free(t0);
}
/* ALIGN/DALIGN front end: byte offset @bp is converted to a bit count. */
void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt, int bp)
{
    gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);
}
/* BITSWAP/DBITSWAP: reverse the bits within each byte of rt, into rd. */
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
{
    TCGv t0;
    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    switch (opc) {
    case OPC_BITSWAP:
        gen_helper_bitswap(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DBITSWAP:
        gen_helper_dbitswap(cpu_gpr[rd], t0);
        break;
#endif
    }
    tcg_temp_free(t0);
}
5339 #ifndef CONFIG_USER_ONLY
5340 /* CP0 (MMU and control) */
/*
 * MTHC0 to an EntryLo register: store @arg into the upper part of the
 * 64-bit CP0 field at env offset @off, preserving the rest of the field.
 */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    /* The extension field occupies 32 bits starting at bit 30. */
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
#else
    /* 32-bit target: @arg simply becomes the high 32 bits. */
    tcg_gen_concat32_i64(t1, t1, t0);
#endif
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);
}
5358 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
5360 TCGv_i64 t0
= tcg_temp_new_i64();
5361 TCGv_i64 t1
= tcg_temp_new_i64();
5363 tcg_gen_ext_tl_i64(t0
, arg
);
5364 tcg_gen_ld_i64(t1
, cpu_env
, off
);
5365 tcg_gen_concat32_i64(t1
, t1
, t0
);
5366 tcg_gen_st_i64(t1
, cpu_env
, off
);
5367 tcg_temp_free_i64(t1
);
5368 tcg_temp_free_i64(t0
);
/*
 * MFHC0 from an EntryLo register: read the upper part of the 64-bit CP0
 * field at env offset @off into @arg.
 */
static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
{
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    /* The extension field starts at bit 30 (see gen_mthc0_entrylo). */
    tcg_gen_shri_i64(t0, t0, 30);
#else
    tcg_gen_shri_i64(t0, t0, 32);
#endif
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);
}
5385 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
5387 TCGv_i64 t0
= tcg_temp_new_i64();
5389 tcg_gen_ld_i64(t0
, cpu_env
, off
);
5390 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
5391 gen_move_low32(arg
, t0
);
5392 tcg_temp_free_i64(t0
);
5395 static inline void gen_mfc0_load32(TCGv arg
, target_ulong off
)
5397 TCGv_i32 t0
= tcg_temp_new_i32();
5399 tcg_gen_ld_i32(t0
, cpu_env
, off
);
5400 tcg_gen_ext_i32_tl(arg
, t0
);
5401 tcg_temp_free_i32(t0
);
5404 static inline void gen_mfc0_load64(TCGv arg
, target_ulong off
)
5406 tcg_gen_ld_tl(arg
, cpu_env
, off
);
5407 tcg_gen_ext32s_tl(arg
, arg
);
5410 static inline void gen_mtc0_store32(TCGv arg
, target_ulong off
)
5412 TCGv_i32 t0
= tcg_temp_new_i32();
5414 tcg_gen_trunc_tl_i32(t0
, arg
);
5415 tcg_gen_st_i32(t0
, cpu_env
, off
);
5416 tcg_temp_free_i32(t0
);
/*
 * Translation-time gate for a CP0 access: if condition @c does not hold,
 * jump to the enclosing function's "cp0_unimplemented" label.
 */
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
/*
 * MFHC0: move the high word of the selected 64-bit CP0 register into @arg.
 * Unsupported (reg, sel) pairs are logged as unimplemented and read as 0.
 */
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";

    switch (reg) {
    case CP0_REGISTER_02:
        switch (sel) {
        case CP0_REG02__ENTRYLO0:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            register_name = "EntryLo0";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_03:
        switch (sel) {
        case CP0_REG03__ENTRYLO1:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            register_name = "EntryLo1";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_09:
        switch (sel) {
        case CP0_REG09__SAAR:
            CP0_CHECK(ctx->saar);
            gen_helper_mfhc0_saar(arg, cpu_env);
            register_name = "SAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_17:
        switch (sel) {
        case CP0_REG17__LLADDR:
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_LLAddr),
                             ctx->CP0_LLAddr_shift);
            register_name = "LLAddr";
            break;
        case CP0_REG17__MAAR:
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            register_name = "MAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_19:
        switch (sel) {
        case CP0_REG19__WATCHHI0:
        case CP0_REG19__WATCHHI1:
        case CP0_REG19__WATCHHI2:
        case CP0_REG19__WATCHHI3:
        case CP0_REG19__WATCHHI4:
        case CP0_REG19__WATCHHI5:
        case CP0_REG19__WATCHHI6:
        case CP0_REG19__WATCHHI7:
            /* upper 32 bits are only available when Config5MI != 0 */
            CP0_CHECK(ctx->mi);
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_WatchHi[sel]), 0);
            register_name = "WatchHi";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_28:
        switch (sel) {
        case CP0_REG28__TAGLO:
        case CP0_REG28__TAGLO1:
        case CP0_REG28__TAGLO2:
        case CP0_REG28__TAGLO3:
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            register_name = "TagLo";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    default:
        goto cp0_unimplemented;
    }
    trace_mips_translate_c0("mfhc0", register_name, reg, sel);
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
    tcg_gen_movi_tl(arg, 0);
}
/*
 * MTHC0: write @arg into the high word of the selected 64-bit CP0
 * register. Unsupported (reg, sel) pairs are logged and ignored.
 */
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *register_name = "invalid";
    /* Writable physical-address extension bits above bit 35. */
    uint64_t mask = ctx->PAMask >> 36;

    switch (reg) {
    case CP0_REGISTER_02:
        switch (sel) {
        case CP0_REG02__ENTRYLO0:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            register_name = "EntryLo0";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_03:
        switch (sel) {
        case CP0_REG03__ENTRYLO1:
            CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            register_name = "EntryLo1";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_09:
        switch (sel) {
        case CP0_REG09__SAAR:
            CP0_CHECK(ctx->saar);
            gen_helper_mthc0_saar(cpu_env, arg);
            register_name = "SAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_17:
        switch (sel) {
        case CP0_REG17__LLADDR:
            /*
             * LLAddr is read-only (the only exception is bit 0 if LLB is
             * supported); the CP0_LLAddr_rw_bitmask does not seem to be
             * relevant for modern MIPS cores supporting MTHC0, therefore
             * treating MTHC0 to LLAddr as NOP.
             */
            register_name = "LLAddr";
            break;
        case CP0_REG17__MAAR:
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            register_name = "MAAR";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_19:
        switch (sel) {
        case CP0_REG19__WATCHHI0:
        case CP0_REG19__WATCHHI1:
        case CP0_REG19__WATCHHI2:
        case CP0_REG19__WATCHHI3:
        case CP0_REG19__WATCHHI4:
        case CP0_REG19__WATCHHI5:
        case CP0_REG19__WATCHHI6:
        case CP0_REG19__WATCHHI7:
            /* upper 32 bits are only available when Config5MI != 0 */
            CP0_CHECK(ctx->mi);
            gen_helper_0e1i(mthc0_watchhi, arg, sel);
            register_name = "WatchHi";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    case CP0_REGISTER_28:
        switch (sel) {
        case CP0_REG28__TAGLO:
        case CP0_REG28__TAGLO1:
        case CP0_REG28__TAGLO2:
        case CP0_REG28__TAGLO3:
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            register_name = "TagLo";
            break;
        default:
            goto cp0_unimplemented;
        }
        break;
    default:
        goto cp0_unimplemented;
    }
    trace_mips_translate_c0("mthc0", register_name, reg, sel);
    return;

cp0_unimplemented:
    qemu_log_mask(LOG_UNIMP, "mthc0 %s (reg %d sel %d)\n",
                  register_name, reg, sel);
}
5629 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
5631 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
5632 tcg_gen_movi_tl(arg
, 0);
5634 tcg_gen_movi_tl(arg
, ~0);
5638 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
5640 const char *register_name
= "invalid";
5643 check_insn(ctx
, ISA_MIPS_R1
);
5647 case CP0_REGISTER_00
:
5649 case CP0_REG00__INDEX
:
5650 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
5651 register_name
= "Index";
5653 case CP0_REG00__MVPCONTROL
:
5654 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5655 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
5656 register_name
= "MVPControl";
5658 case CP0_REG00__MVPCONF0
:
5659 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5660 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
5661 register_name
= "MVPConf0";
5663 case CP0_REG00__MVPCONF1
:
5664 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5665 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
5666 register_name
= "MVPConf1";
5668 case CP0_REG00__VPCONTROL
:
5670 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
5671 register_name
= "VPControl";
5674 goto cp0_unimplemented
;
5677 case CP0_REGISTER_01
:
5679 case CP0_REG01__RANDOM
:
5680 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
5681 gen_helper_mfc0_random(arg
, cpu_env
);
5682 register_name
= "Random";
5684 case CP0_REG01__VPECONTROL
:
5685 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5686 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
5687 register_name
= "VPEControl";
5689 case CP0_REG01__VPECONF0
:
5690 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5691 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
5692 register_name
= "VPEConf0";
5694 case CP0_REG01__VPECONF1
:
5695 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5696 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
5697 register_name
= "VPEConf1";
5699 case CP0_REG01__YQMASK
:
5700 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5701 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
5702 register_name
= "YQMask";
5704 case CP0_REG01__VPESCHEDULE
:
5705 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5706 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
5707 register_name
= "VPESchedule";
5709 case CP0_REG01__VPESCHEFBACK
:
5710 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5711 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
5712 register_name
= "VPEScheFBack";
5714 case CP0_REG01__VPEOPT
:
5715 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5716 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
5717 register_name
= "VPEOpt";
5720 goto cp0_unimplemented
;
5723 case CP0_REGISTER_02
:
5725 case CP0_REG02__ENTRYLO0
:
5727 TCGv_i64 tmp
= tcg_temp_new_i64();
5728 tcg_gen_ld_i64(tmp
, cpu_env
,
5729 offsetof(CPUMIPSState
, CP0_EntryLo0
));
5730 #if defined(TARGET_MIPS64)
5732 /* Move RI/XI fields to bits 31:30 */
5733 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5734 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5737 gen_move_low32(arg
, tmp
);
5738 tcg_temp_free_i64(tmp
);
5740 register_name
= "EntryLo0";
5742 case CP0_REG02__TCSTATUS
:
5743 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5744 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
5745 register_name
= "TCStatus";
5747 case CP0_REG02__TCBIND
:
5748 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5749 gen_helper_mfc0_tcbind(arg
, cpu_env
);
5750 register_name
= "TCBind";
5752 case CP0_REG02__TCRESTART
:
5753 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5754 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
5755 register_name
= "TCRestart";
5757 case CP0_REG02__TCHALT
:
5758 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5759 gen_helper_mfc0_tchalt(arg
, cpu_env
);
5760 register_name
= "TCHalt";
5762 case CP0_REG02__TCCONTEXT
:
5763 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5764 gen_helper_mfc0_tccontext(arg
, cpu_env
);
5765 register_name
= "TCContext";
5767 case CP0_REG02__TCSCHEDULE
:
5768 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5769 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
5770 register_name
= "TCSchedule";
5772 case CP0_REG02__TCSCHEFBACK
:
5773 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
5774 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
5775 register_name
= "TCScheFBack";
5778 goto cp0_unimplemented
;
5781 case CP0_REGISTER_03
:
5783 case CP0_REG03__ENTRYLO1
:
5785 TCGv_i64 tmp
= tcg_temp_new_i64();
5786 tcg_gen_ld_i64(tmp
, cpu_env
,
5787 offsetof(CPUMIPSState
, CP0_EntryLo1
));
5788 #if defined(TARGET_MIPS64)
5790 /* Move RI/XI fields to bits 31:30 */
5791 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
5792 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
5795 gen_move_low32(arg
, tmp
);
5796 tcg_temp_free_i64(tmp
);
5798 register_name
= "EntryLo1";
5800 case CP0_REG03__GLOBALNUM
:
5802 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
5803 register_name
= "GlobalNumber";
5806 goto cp0_unimplemented
;
5809 case CP0_REGISTER_04
:
5811 case CP0_REG04__CONTEXT
:
5812 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
5813 tcg_gen_ext32s_tl(arg
, arg
);
5814 register_name
= "Context";
5816 case CP0_REG04__CONTEXTCONFIG
:
5818 /* gen_helper_mfc0_contextconfig(arg); */
5819 register_name
= "ContextConfig";
5820 goto cp0_unimplemented
;
5821 case CP0_REG04__USERLOCAL
:
5822 CP0_CHECK(ctx
->ulri
);
5823 tcg_gen_ld_tl(arg
, cpu_env
,
5824 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
5825 tcg_gen_ext32s_tl(arg
, arg
);
5826 register_name
= "UserLocal";
5828 case CP0_REG04__MMID
:
5830 gen_helper_mtc0_memorymapid(cpu_env
, arg
);
5831 register_name
= "MMID";
5834 goto cp0_unimplemented
;
5837 case CP0_REGISTER_05
:
5839 case CP0_REG05__PAGEMASK
:
5840 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
5841 register_name
= "PageMask";
5843 case CP0_REG05__PAGEGRAIN
:
5844 check_insn(ctx
, ISA_MIPS_R2
);
5845 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
5846 register_name
= "PageGrain";
5848 case CP0_REG05__SEGCTL0
:
5850 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
5851 tcg_gen_ext32s_tl(arg
, arg
);
5852 register_name
= "SegCtl0";
5854 case CP0_REG05__SEGCTL1
:
5856 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
5857 tcg_gen_ext32s_tl(arg
, arg
);
5858 register_name
= "SegCtl1";
5860 case CP0_REG05__SEGCTL2
:
5862 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
5863 tcg_gen_ext32s_tl(arg
, arg
);
5864 register_name
= "SegCtl2";
5866 case CP0_REG05__PWBASE
:
5868 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
5869 register_name
= "PWBase";
5871 case CP0_REG05__PWFIELD
:
5873 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
5874 register_name
= "PWField";
5876 case CP0_REG05__PWSIZE
:
5878 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
5879 register_name
= "PWSize";
5882 goto cp0_unimplemented
;
5885 case CP0_REGISTER_06
:
5887 case CP0_REG06__WIRED
:
5888 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
5889 register_name
= "Wired";
5891 case CP0_REG06__SRSCONF0
:
5892 check_insn(ctx
, ISA_MIPS_R2
);
5893 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
5894 register_name
= "SRSConf0";
5896 case CP0_REG06__SRSCONF1
:
5897 check_insn(ctx
, ISA_MIPS_R2
);
5898 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
5899 register_name
= "SRSConf1";
5901 case CP0_REG06__SRSCONF2
:
5902 check_insn(ctx
, ISA_MIPS_R2
);
5903 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
5904 register_name
= "SRSConf2";
5906 case CP0_REG06__SRSCONF3
:
5907 check_insn(ctx
, ISA_MIPS_R2
);
5908 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
5909 register_name
= "SRSConf3";
5911 case CP0_REG06__SRSCONF4
:
5912 check_insn(ctx
, ISA_MIPS_R2
);
5913 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
5914 register_name
= "SRSConf4";
5916 case CP0_REG06__PWCTL
:
5918 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
5919 register_name
= "PWCtl";
5922 goto cp0_unimplemented
;
5925 case CP0_REGISTER_07
:
5927 case CP0_REG07__HWRENA
:
5928 check_insn(ctx
, ISA_MIPS_R2
);
5929 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
5930 register_name
= "HWREna";
5933 goto cp0_unimplemented
;
5936 case CP0_REGISTER_08
:
5938 case CP0_REG08__BADVADDR
:
5939 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
5940 tcg_gen_ext32s_tl(arg
, arg
);
5941 register_name
= "BadVAddr";
5943 case CP0_REG08__BADINSTR
:
5945 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
5946 register_name
= "BadInstr";
5948 case CP0_REG08__BADINSTRP
:
5950 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
5951 register_name
= "BadInstrP";
5953 case CP0_REG08__BADINSTRX
:
5955 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
5956 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
5957 register_name
= "BadInstrX";
5960 goto cp0_unimplemented
;
5963 case CP0_REGISTER_09
:
5965 case CP0_REG09__COUNT
:
5966 /* Mark as an IO operation because we read the time. */
5967 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
5970 gen_helper_mfc0_count(arg
, cpu_env
);
5972 * Break the TB to be able to take timer interrupts immediately
5973 * after reading count. DISAS_STOP isn't sufficient, we need to
5974 * ensure we break completely out of translated code.
5976 gen_save_pc(ctx
->base
.pc_next
+ 4);
5977 ctx
->base
.is_jmp
= DISAS_EXIT
;
5978 register_name
= "Count";
5980 case CP0_REG09__SAARI
:
5981 CP0_CHECK(ctx
->saar
);
5982 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
5983 register_name
= "SAARI";
5985 case CP0_REG09__SAAR
:
5986 CP0_CHECK(ctx
->saar
);
5987 gen_helper_mfc0_saar(arg
, cpu_env
);
5988 register_name
= "SAAR";
5991 goto cp0_unimplemented
;
5994 case CP0_REGISTER_10
:
5996 case CP0_REG10__ENTRYHI
:
5997 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
5998 tcg_gen_ext32s_tl(arg
, arg
);
5999 register_name
= "EntryHi";
6002 goto cp0_unimplemented
;
6005 case CP0_REGISTER_11
:
6007 case CP0_REG11__COMPARE
:
6008 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
6009 register_name
= "Compare";
6011 /* 6,7 are implementation dependent */
6013 goto cp0_unimplemented
;
6016 case CP0_REGISTER_12
:
6018 case CP0_REG12__STATUS
:
6019 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
6020 register_name
= "Status";
6022 case CP0_REG12__INTCTL
:
6023 check_insn(ctx
, ISA_MIPS_R2
);
6024 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
6025 register_name
= "IntCtl";
6027 case CP0_REG12__SRSCTL
:
6028 check_insn(ctx
, ISA_MIPS_R2
);
6029 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
6030 register_name
= "SRSCtl";
6032 case CP0_REG12__SRSMAP
:
6033 check_insn(ctx
, ISA_MIPS_R2
);
6034 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6035 register_name
= "SRSMap";
6038 goto cp0_unimplemented
;
6041 case CP0_REGISTER_13
:
6043 case CP0_REG13__CAUSE
:
6044 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
6045 register_name
= "Cause";
6048 goto cp0_unimplemented
;
6051 case CP0_REGISTER_14
:
6053 case CP0_REG14__EPC
:
6054 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6055 tcg_gen_ext32s_tl(arg
, arg
);
6056 register_name
= "EPC";
6059 goto cp0_unimplemented
;
6062 case CP0_REGISTER_15
:
6064 case CP0_REG15__PRID
:
6065 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
6066 register_name
= "PRid";
6068 case CP0_REG15__EBASE
:
6069 check_insn(ctx
, ISA_MIPS_R2
);
6070 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
6071 tcg_gen_ext32s_tl(arg
, arg
);
6072 register_name
= "EBase";
6074 case CP0_REG15__CMGCRBASE
:
6075 check_insn(ctx
, ISA_MIPS_R2
);
6076 CP0_CHECK(ctx
->cmgcr
);
6077 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
6078 tcg_gen_ext32s_tl(arg
, arg
);
6079 register_name
= "CMGCRBase";
6082 goto cp0_unimplemented
;
6085 case CP0_REGISTER_16
:
6087 case CP0_REG16__CONFIG
:
6088 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
6089 register_name
= "Config";
6091 case CP0_REG16__CONFIG1
:
6092 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
6093 register_name
= "Config1";
6095 case CP0_REG16__CONFIG2
:
6096 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
6097 register_name
= "Config2";
6099 case CP0_REG16__CONFIG3
:
6100 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
6101 register_name
= "Config3";
6103 case CP0_REG16__CONFIG4
:
6104 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
6105 register_name
= "Config4";
6107 case CP0_REG16__CONFIG5
:
6108 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
6109 register_name
= "Config5";
6111 /* 6,7 are implementation dependent */
6112 case CP0_REG16__CONFIG6
:
6113 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
6114 register_name
= "Config6";
6116 case CP0_REG16__CONFIG7
:
6117 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
6118 register_name
= "Config7";
6121 goto cp0_unimplemented
;
6124 case CP0_REGISTER_17
:
6126 case CP0_REG17__LLADDR
:
6127 gen_helper_mfc0_lladdr(arg
, cpu_env
);
6128 register_name
= "LLAddr";
6130 case CP0_REG17__MAAR
:
6131 CP0_CHECK(ctx
->mrp
);
6132 gen_helper_mfc0_maar(arg
, cpu_env
);
6133 register_name
= "MAAR";
6135 case CP0_REG17__MAARI
:
6136 CP0_CHECK(ctx
->mrp
);
6137 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
6138 register_name
= "MAARI";
6141 goto cp0_unimplemented
;
6144 case CP0_REGISTER_18
:
6146 case CP0_REG18__WATCHLO0
:
6147 case CP0_REG18__WATCHLO1
:
6148 case CP0_REG18__WATCHLO2
:
6149 case CP0_REG18__WATCHLO3
:
6150 case CP0_REG18__WATCHLO4
:
6151 case CP0_REG18__WATCHLO5
:
6152 case CP0_REG18__WATCHLO6
:
6153 case CP0_REG18__WATCHLO7
:
6154 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6155 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
6156 register_name
= "WatchLo";
6159 goto cp0_unimplemented
;
6162 case CP0_REGISTER_19
:
6164 case CP0_REG19__WATCHHI0
:
6165 case CP0_REG19__WATCHHI1
:
6166 case CP0_REG19__WATCHHI2
:
6167 case CP0_REG19__WATCHHI3
:
6168 case CP0_REG19__WATCHHI4
:
6169 case CP0_REG19__WATCHHI5
:
6170 case CP0_REG19__WATCHHI6
:
6171 case CP0_REG19__WATCHHI7
:
6172 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6173 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
6174 register_name
= "WatchHi";
6177 goto cp0_unimplemented
;
6180 case CP0_REGISTER_20
:
6182 case CP0_REG20__XCONTEXT
:
6183 #if defined(TARGET_MIPS64)
6184 check_insn(ctx
, ISA_MIPS3
);
6185 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
6186 tcg_gen_ext32s_tl(arg
, arg
);
6187 register_name
= "XContext";
6191 goto cp0_unimplemented
;
6194 case CP0_REGISTER_21
:
6195 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6196 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
6199 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
6200 register_name
= "Framemask";
6203 goto cp0_unimplemented
;
6206 case CP0_REGISTER_22
:
6207 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6208 register_name
= "'Diagnostic"; /* implementation dependent */
6210 case CP0_REGISTER_23
:
6212 case CP0_REG23__DEBUG
:
6213 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
6214 register_name
= "Debug";
6216 case CP0_REG23__TRACECONTROL
:
6217 /* PDtrace support */
6218 /* gen_helper_mfc0_tracecontrol(arg); */
6219 register_name
= "TraceControl";
6220 goto cp0_unimplemented
;
6221 case CP0_REG23__TRACECONTROL2
:
6222 /* PDtrace support */
6223 /* gen_helper_mfc0_tracecontrol2(arg); */
6224 register_name
= "TraceControl2";
6225 goto cp0_unimplemented
;
6226 case CP0_REG23__USERTRACEDATA1
:
6227 /* PDtrace support */
6228 /* gen_helper_mfc0_usertracedata1(arg);*/
6229 register_name
= "UserTraceData1";
6230 goto cp0_unimplemented
;
6231 case CP0_REG23__TRACEIBPC
:
6232 /* PDtrace support */
6233 /* gen_helper_mfc0_traceibpc(arg); */
6234 register_name
= "TraceIBPC";
6235 goto cp0_unimplemented
;
6236 case CP0_REG23__TRACEDBPC
:
6237 /* PDtrace support */
6238 /* gen_helper_mfc0_tracedbpc(arg); */
6239 register_name
= "TraceDBPC";
6240 goto cp0_unimplemented
;
6242 goto cp0_unimplemented
;
6245 case CP0_REGISTER_24
:
6247 case CP0_REG24__DEPC
:
6249 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
6250 tcg_gen_ext32s_tl(arg
, arg
);
6251 register_name
= "DEPC";
6254 goto cp0_unimplemented
;
6257 case CP0_REGISTER_25
:
6259 case CP0_REG25__PERFCTL0
:
6260 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
6261 register_name
= "Performance0";
6263 case CP0_REG25__PERFCNT0
:
6264 /* gen_helper_mfc0_performance1(arg); */
6265 register_name
= "Performance1";
6266 goto cp0_unimplemented
;
6267 case CP0_REG25__PERFCTL1
:
6268 /* gen_helper_mfc0_performance2(arg); */
6269 register_name
= "Performance2";
6270 goto cp0_unimplemented
;
6271 case CP0_REG25__PERFCNT1
:
6272 /* gen_helper_mfc0_performance3(arg); */
6273 register_name
= "Performance3";
6274 goto cp0_unimplemented
;
6275 case CP0_REG25__PERFCTL2
:
6276 /* gen_helper_mfc0_performance4(arg); */
6277 register_name
= "Performance4";
6278 goto cp0_unimplemented
;
6279 case CP0_REG25__PERFCNT2
:
6280 /* gen_helper_mfc0_performance5(arg); */
6281 register_name
= "Performance5";
6282 goto cp0_unimplemented
;
6283 case CP0_REG25__PERFCTL3
:
6284 /* gen_helper_mfc0_performance6(arg); */
6285 register_name
= "Performance6";
6286 goto cp0_unimplemented
;
6287 case CP0_REG25__PERFCNT3
:
6288 /* gen_helper_mfc0_performance7(arg); */
6289 register_name
= "Performance7";
6290 goto cp0_unimplemented
;
6292 goto cp0_unimplemented
;
6295 case CP0_REGISTER_26
:
6297 case CP0_REG26__ERRCTL
:
6298 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
6299 register_name
= "ErrCtl";
6302 goto cp0_unimplemented
;
6305 case CP0_REGISTER_27
:
6307 case CP0_REG27__CACHERR
:
6308 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
6309 register_name
= "CacheErr";
6312 goto cp0_unimplemented
;
6315 case CP0_REGISTER_28
:
6317 case CP0_REG28__TAGLO
:
6318 case CP0_REG28__TAGLO1
:
6319 case CP0_REG28__TAGLO2
:
6320 case CP0_REG28__TAGLO3
:
6322 TCGv_i64 tmp
= tcg_temp_new_i64();
6323 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
6324 gen_move_low32(arg
, tmp
);
6325 tcg_temp_free_i64(tmp
);
6327 register_name
= "TagLo";
6329 case CP0_REG28__DATALO
:
6330 case CP0_REG28__DATALO1
:
6331 case CP0_REG28__DATALO2
:
6332 case CP0_REG28__DATALO3
:
6333 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
6334 register_name
= "DataLo";
6337 goto cp0_unimplemented
;
6340 case CP0_REGISTER_29
:
6342 case CP0_REG29__TAGHI
:
6343 case CP0_REG29__TAGHI1
:
6344 case CP0_REG29__TAGHI2
:
6345 case CP0_REG29__TAGHI3
:
6346 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
6347 register_name
= "TagHi";
6349 case CP0_REG29__DATAHI
:
6350 case CP0_REG29__DATAHI1
:
6351 case CP0_REG29__DATAHI2
:
6352 case CP0_REG29__DATAHI3
:
6353 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
6354 register_name
= "DataHi";
6357 goto cp0_unimplemented
;
6360 case CP0_REGISTER_30
:
6362 case CP0_REG30__ERROREPC
:
6363 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
6364 tcg_gen_ext32s_tl(arg
, arg
);
6365 register_name
= "ErrorEPC";
6368 goto cp0_unimplemented
;
6371 case CP0_REGISTER_31
:
6373 case CP0_REG31__DESAVE
:
6375 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
6376 register_name
= "DESAVE";
6378 case CP0_REG31__KSCRATCH1
:
6379 case CP0_REG31__KSCRATCH2
:
6380 case CP0_REG31__KSCRATCH3
:
6381 case CP0_REG31__KSCRATCH4
:
6382 case CP0_REG31__KSCRATCH5
:
6383 case CP0_REG31__KSCRATCH6
:
6384 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
6385 tcg_gen_ld_tl(arg
, cpu_env
,
6386 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
6387 tcg_gen_ext32s_tl(arg
, arg
);
6388 register_name
= "KScratch";
6391 goto cp0_unimplemented
;
6395 goto cp0_unimplemented
;
6397 trace_mips_translate_c0("mfc0", register_name
, reg
, sel
);
6401 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n",
6402 register_name
, reg
, sel
);
6403 gen_mfc0_unimplemented(ctx
, arg
);
6406 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6408 const char *register_name
= "invalid";
6411 check_insn(ctx
, ISA_MIPS_R1
);
6414 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
6419 case CP0_REGISTER_00
:
6421 case CP0_REG00__INDEX
:
6422 gen_helper_mtc0_index(cpu_env
, arg
);
6423 register_name
= "Index";
6425 case CP0_REG00__MVPCONTROL
:
6426 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6427 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
6428 register_name
= "MVPControl";
6430 case CP0_REG00__MVPCONF0
:
6431 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6433 register_name
= "MVPConf0";
6435 case CP0_REG00__MVPCONF1
:
6436 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6438 register_name
= "MVPConf1";
6440 case CP0_REG00__VPCONTROL
:
6443 register_name
= "VPControl";
6446 goto cp0_unimplemented
;
6449 case CP0_REGISTER_01
:
6451 case CP0_REG01__RANDOM
:
6453 register_name
= "Random";
6455 case CP0_REG01__VPECONTROL
:
6456 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6457 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
6458 register_name
= "VPEControl";
6460 case CP0_REG01__VPECONF0
:
6461 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6462 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
6463 register_name
= "VPEConf0";
6465 case CP0_REG01__VPECONF1
:
6466 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6467 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
6468 register_name
= "VPEConf1";
6470 case CP0_REG01__YQMASK
:
6471 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6472 gen_helper_mtc0_yqmask(cpu_env
, arg
);
6473 register_name
= "YQMask";
6475 case CP0_REG01__VPESCHEDULE
:
6476 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6477 tcg_gen_st_tl(arg
, cpu_env
,
6478 offsetof(CPUMIPSState
, CP0_VPESchedule
));
6479 register_name
= "VPESchedule";
6481 case CP0_REG01__VPESCHEFBACK
:
6482 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6483 tcg_gen_st_tl(arg
, cpu_env
,
6484 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6485 register_name
= "VPEScheFBack";
6487 case CP0_REG01__VPEOPT
:
6488 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6489 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
6490 register_name
= "VPEOpt";
6493 goto cp0_unimplemented
;
6496 case CP0_REGISTER_02
:
6498 case CP0_REG02__ENTRYLO0
:
6499 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
6500 register_name
= "EntryLo0";
6502 case CP0_REG02__TCSTATUS
:
6503 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6504 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
6505 register_name
= "TCStatus";
6507 case CP0_REG02__TCBIND
:
6508 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6509 gen_helper_mtc0_tcbind(cpu_env
, arg
);
6510 register_name
= "TCBind";
6512 case CP0_REG02__TCRESTART
:
6513 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6514 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
6515 register_name
= "TCRestart";
6517 case CP0_REG02__TCHALT
:
6518 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6519 gen_helper_mtc0_tchalt(cpu_env
, arg
);
6520 register_name
= "TCHalt";
6522 case CP0_REG02__TCCONTEXT
:
6523 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6524 gen_helper_mtc0_tccontext(cpu_env
, arg
);
6525 register_name
= "TCContext";
6527 case CP0_REG02__TCSCHEDULE
:
6528 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6529 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
6530 register_name
= "TCSchedule";
6532 case CP0_REG02__TCSCHEFBACK
:
6533 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6534 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
6535 register_name
= "TCScheFBack";
6538 goto cp0_unimplemented
;
6541 case CP0_REGISTER_03
:
6543 case CP0_REG03__ENTRYLO1
:
6544 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
6545 register_name
= "EntryLo1";
6547 case CP0_REG03__GLOBALNUM
:
6550 register_name
= "GlobalNumber";
6553 goto cp0_unimplemented
;
6556 case CP0_REGISTER_04
:
6558 case CP0_REG04__CONTEXT
:
6559 gen_helper_mtc0_context(cpu_env
, arg
);
6560 register_name
= "Context";
6562 case CP0_REG04__CONTEXTCONFIG
:
6564 /* gen_helper_mtc0_contextconfig(arg); */
6565 register_name
= "ContextConfig";
6566 goto cp0_unimplemented
;
6567 case CP0_REG04__USERLOCAL
:
6568 CP0_CHECK(ctx
->ulri
);
6569 tcg_gen_st_tl(arg
, cpu_env
,
6570 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6571 register_name
= "UserLocal";
6573 case CP0_REG04__MMID
:
6575 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MemoryMapID
));
6576 register_name
= "MMID";
6579 goto cp0_unimplemented
;
6582 case CP0_REGISTER_05
:
6584 case CP0_REG05__PAGEMASK
:
6585 gen_helper_mtc0_pagemask(cpu_env
, arg
);
6586 register_name
= "PageMask";
6588 case CP0_REG05__PAGEGRAIN
:
6589 check_insn(ctx
, ISA_MIPS_R2
);
6590 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
6591 register_name
= "PageGrain";
6592 ctx
->base
.is_jmp
= DISAS_STOP
;
6594 case CP0_REG05__SEGCTL0
:
6596 gen_helper_mtc0_segctl0(cpu_env
, arg
);
6597 register_name
= "SegCtl0";
6599 case CP0_REG05__SEGCTL1
:
6601 gen_helper_mtc0_segctl1(cpu_env
, arg
);
6602 register_name
= "SegCtl1";
6604 case CP0_REG05__SEGCTL2
:
6606 gen_helper_mtc0_segctl2(cpu_env
, arg
);
6607 register_name
= "SegCtl2";
6609 case CP0_REG05__PWBASE
:
6611 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
6612 register_name
= "PWBase";
6614 case CP0_REG05__PWFIELD
:
6616 gen_helper_mtc0_pwfield(cpu_env
, arg
);
6617 register_name
= "PWField";
6619 case CP0_REG05__PWSIZE
:
6621 gen_helper_mtc0_pwsize(cpu_env
, arg
);
6622 register_name
= "PWSize";
6625 goto cp0_unimplemented
;
6628 case CP0_REGISTER_06
:
6630 case CP0_REG06__WIRED
:
6631 gen_helper_mtc0_wired(cpu_env
, arg
);
6632 register_name
= "Wired";
6634 case CP0_REG06__SRSCONF0
:
6635 check_insn(ctx
, ISA_MIPS_R2
);
6636 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
6637 register_name
= "SRSConf0";
6639 case CP0_REG06__SRSCONF1
:
6640 check_insn(ctx
, ISA_MIPS_R2
);
6641 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
6642 register_name
= "SRSConf1";
6644 case CP0_REG06__SRSCONF2
:
6645 check_insn(ctx
, ISA_MIPS_R2
);
6646 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
6647 register_name
= "SRSConf2";
6649 case CP0_REG06__SRSCONF3
:
6650 check_insn(ctx
, ISA_MIPS_R2
);
6651 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
6652 register_name
= "SRSConf3";
6654 case CP0_REG06__SRSCONF4
:
6655 check_insn(ctx
, ISA_MIPS_R2
);
6656 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
6657 register_name
= "SRSConf4";
6659 case CP0_REG06__PWCTL
:
6661 gen_helper_mtc0_pwctl(cpu_env
, arg
);
6662 register_name
= "PWCtl";
6665 goto cp0_unimplemented
;
6668 case CP0_REGISTER_07
:
6670 case CP0_REG07__HWRENA
:
6671 check_insn(ctx
, ISA_MIPS_R2
);
6672 gen_helper_mtc0_hwrena(cpu_env
, arg
);
6673 ctx
->base
.is_jmp
= DISAS_STOP
;
6674 register_name
= "HWREna";
6677 goto cp0_unimplemented
;
6680 case CP0_REGISTER_08
:
6682 case CP0_REG08__BADVADDR
:
6684 register_name
= "BadVAddr";
6686 case CP0_REG08__BADINSTR
:
6688 register_name
= "BadInstr";
6690 case CP0_REG08__BADINSTRP
:
6692 register_name
= "BadInstrP";
6694 case CP0_REG08__BADINSTRX
:
6696 register_name
= "BadInstrX";
6699 goto cp0_unimplemented
;
6702 case CP0_REGISTER_09
:
6704 case CP0_REG09__COUNT
:
6705 gen_helper_mtc0_count(cpu_env
, arg
);
6706 register_name
= "Count";
6708 case CP0_REG09__SAARI
:
6709 CP0_CHECK(ctx
->saar
);
6710 gen_helper_mtc0_saari(cpu_env
, arg
);
6711 register_name
= "SAARI";
6713 case CP0_REG09__SAAR
:
6714 CP0_CHECK(ctx
->saar
);
6715 gen_helper_mtc0_saar(cpu_env
, arg
);
6716 register_name
= "SAAR";
6719 goto cp0_unimplemented
;
6722 case CP0_REGISTER_10
:
6724 case CP0_REG10__ENTRYHI
:
6725 gen_helper_mtc0_entryhi(cpu_env
, arg
);
6726 register_name
= "EntryHi";
6729 goto cp0_unimplemented
;
6732 case CP0_REGISTER_11
:
6734 case CP0_REG11__COMPARE
:
6735 gen_helper_mtc0_compare(cpu_env
, arg
);
6736 register_name
= "Compare";
6738 /* 6,7 are implementation dependent */
6740 goto cp0_unimplemented
;
6743 case CP0_REGISTER_12
:
6745 case CP0_REG12__STATUS
:
6746 save_cpu_state(ctx
, 1);
6747 gen_helper_mtc0_status(cpu_env
, arg
);
6748 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6749 gen_save_pc(ctx
->base
.pc_next
+ 4);
6750 ctx
->base
.is_jmp
= DISAS_EXIT
;
6751 register_name
= "Status";
6753 case CP0_REG12__INTCTL
:
6754 check_insn(ctx
, ISA_MIPS_R2
);
6755 gen_helper_mtc0_intctl(cpu_env
, arg
);
6756 /* Stop translation as we may have switched the execution mode */
6757 ctx
->base
.is_jmp
= DISAS_STOP
;
6758 register_name
= "IntCtl";
6760 case CP0_REG12__SRSCTL
:
6761 check_insn(ctx
, ISA_MIPS_R2
);
6762 gen_helper_mtc0_srsctl(cpu_env
, arg
);
6763 /* Stop translation as we may have switched the execution mode */
6764 ctx
->base
.is_jmp
= DISAS_STOP
;
6765 register_name
= "SRSCtl";
6767 case CP0_REG12__SRSMAP
:
6768 check_insn(ctx
, ISA_MIPS_R2
);
6769 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
6770 /* Stop translation as we may have switched the execution mode */
6771 ctx
->base
.is_jmp
= DISAS_STOP
;
6772 register_name
= "SRSMap";
6775 goto cp0_unimplemented
;
6778 case CP0_REGISTER_13
:
6780 case CP0_REG13__CAUSE
:
6781 save_cpu_state(ctx
, 1);
6782 gen_helper_mtc0_cause(cpu_env
, arg
);
6784 * Stop translation as we may have triggered an interrupt.
6785 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6786 * translated code to check for pending interrupts.
6788 gen_save_pc(ctx
->base
.pc_next
+ 4);
6789 ctx
->base
.is_jmp
= DISAS_EXIT
;
6790 register_name
= "Cause";
6793 goto cp0_unimplemented
;
6796 case CP0_REGISTER_14
:
6798 case CP0_REG14__EPC
:
6799 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
6800 register_name
= "EPC";
6803 goto cp0_unimplemented
;
6806 case CP0_REGISTER_15
:
6808 case CP0_REG15__PRID
:
6810 register_name
= "PRid";
6812 case CP0_REG15__EBASE
:
6813 check_insn(ctx
, ISA_MIPS_R2
);
6814 gen_helper_mtc0_ebase(cpu_env
, arg
);
6815 register_name
= "EBase";
6818 goto cp0_unimplemented
;
6821 case CP0_REGISTER_16
:
6823 case CP0_REG16__CONFIG
:
6824 gen_helper_mtc0_config0(cpu_env
, arg
);
6825 register_name
= "Config";
6826 /* Stop translation as we may have switched the execution mode */
6827 ctx
->base
.is_jmp
= DISAS_STOP
;
6829 case CP0_REG16__CONFIG1
:
6830 /* ignored, read only */
6831 register_name
= "Config1";
6833 case CP0_REG16__CONFIG2
:
6834 gen_helper_mtc0_config2(cpu_env
, arg
);
6835 register_name
= "Config2";
6836 /* Stop translation as we may have switched the execution mode */
6837 ctx
->base
.is_jmp
= DISAS_STOP
;
6839 case CP0_REG16__CONFIG3
:
6840 gen_helper_mtc0_config3(cpu_env
, arg
);
6841 register_name
= "Config3";
6842 /* Stop translation as we may have switched the execution mode */
6843 ctx
->base
.is_jmp
= DISAS_STOP
;
6845 case CP0_REG16__CONFIG4
:
6846 gen_helper_mtc0_config4(cpu_env
, arg
);
6847 register_name
= "Config4";
6848 ctx
->base
.is_jmp
= DISAS_STOP
;
6850 case CP0_REG16__CONFIG5
:
6851 gen_helper_mtc0_config5(cpu_env
, arg
);
6852 register_name
= "Config5";
6853 /* Stop translation as we may have switched the execution mode */
6854 ctx
->base
.is_jmp
= DISAS_STOP
;
6856 /* 6,7 are implementation dependent */
6857 case CP0_REG16__CONFIG6
:
6859 register_name
= "Config6";
6861 case CP0_REG16__CONFIG7
:
6863 register_name
= "Config7";
6866 register_name
= "Invalid config selector";
6867 goto cp0_unimplemented
;
6870 case CP0_REGISTER_17
:
6872 case CP0_REG17__LLADDR
:
6873 gen_helper_mtc0_lladdr(cpu_env
, arg
);
6874 register_name
= "LLAddr";
6876 case CP0_REG17__MAAR
:
6877 CP0_CHECK(ctx
->mrp
);
6878 gen_helper_mtc0_maar(cpu_env
, arg
);
6879 register_name
= "MAAR";
6881 case CP0_REG17__MAARI
:
6882 CP0_CHECK(ctx
->mrp
);
6883 gen_helper_mtc0_maari(cpu_env
, arg
);
6884 register_name
= "MAARI";
6887 goto cp0_unimplemented
;
6890 case CP0_REGISTER_18
:
6892 case CP0_REG18__WATCHLO0
:
6893 case CP0_REG18__WATCHLO1
:
6894 case CP0_REG18__WATCHLO2
:
6895 case CP0_REG18__WATCHLO3
:
6896 case CP0_REG18__WATCHLO4
:
6897 case CP0_REG18__WATCHLO5
:
6898 case CP0_REG18__WATCHLO6
:
6899 case CP0_REG18__WATCHLO7
:
6900 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6901 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
6902 register_name
= "WatchLo";
6905 goto cp0_unimplemented
;
6908 case CP0_REGISTER_19
:
6910 case CP0_REG19__WATCHHI0
:
6911 case CP0_REG19__WATCHHI1
:
6912 case CP0_REG19__WATCHHI2
:
6913 case CP0_REG19__WATCHHI3
:
6914 case CP0_REG19__WATCHHI4
:
6915 case CP0_REG19__WATCHHI5
:
6916 case CP0_REG19__WATCHHI6
:
6917 case CP0_REG19__WATCHHI7
:
6918 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
6919 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
6920 register_name
= "WatchHi";
6923 goto cp0_unimplemented
;
6926 case CP0_REGISTER_20
:
6928 case CP0_REG20__XCONTEXT
:
6929 #if defined(TARGET_MIPS64)
6930 check_insn(ctx
, ISA_MIPS3
);
6931 gen_helper_mtc0_xcontext(cpu_env
, arg
);
6932 register_name
= "XContext";
6936 goto cp0_unimplemented
;
6939 case CP0_REGISTER_21
:
6940 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6941 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
6944 gen_helper_mtc0_framemask(cpu_env
, arg
);
6945 register_name
= "Framemask";
6948 goto cp0_unimplemented
;
6951 case CP0_REGISTER_22
:
6953 register_name
= "Diagnostic"; /* implementation dependent */
6955 case CP0_REGISTER_23
:
6957 case CP0_REG23__DEBUG
:
6958 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
6959 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6960 gen_save_pc(ctx
->base
.pc_next
+ 4);
6961 ctx
->base
.is_jmp
= DISAS_EXIT
;
6962 register_name
= "Debug";
6964 case CP0_REG23__TRACECONTROL
:
6965 /* PDtrace support */
6966 /* gen_helper_mtc0_tracecontrol(cpu_env, arg); */
6967 register_name
= "TraceControl";
6968 /* Stop translation as we may have switched the execution mode */
6969 ctx
->base
.is_jmp
= DISAS_STOP
;
6970 goto cp0_unimplemented
;
6971 case CP0_REG23__TRACECONTROL2
:
6972 /* PDtrace support */
6973 /* gen_helper_mtc0_tracecontrol2(cpu_env, arg); */
6974 register_name
= "TraceControl2";
6975 /* Stop translation as we may have switched the execution mode */
6976 ctx
->base
.is_jmp
= DISAS_STOP
;
6977 goto cp0_unimplemented
;
6978 case CP0_REG23__USERTRACEDATA1
:
6979 /* Stop translation as we may have switched the execution mode */
6980 ctx
->base
.is_jmp
= DISAS_STOP
;
6981 /* PDtrace support */
6982 /* gen_helper_mtc0_usertracedata1(cpu_env, arg);*/
6983 register_name
= "UserTraceData";
6984 /* Stop translation as we may have switched the execution mode */
6985 ctx
->base
.is_jmp
= DISAS_STOP
;
6986 goto cp0_unimplemented
;
6987 case CP0_REG23__TRACEIBPC
:
6988 /* PDtrace support */
6989 /* gen_helper_mtc0_traceibpc(cpu_env, arg); */
6990 /* Stop translation as we may have switched the execution mode */
6991 ctx
->base
.is_jmp
= DISAS_STOP
;
6992 register_name
= "TraceIBPC";
6993 goto cp0_unimplemented
;
6994 case CP0_REG23__TRACEDBPC
:
6995 /* PDtrace support */
6996 /* gen_helper_mtc0_tracedbpc(cpu_env, arg); */
6997 /* Stop translation as we may have switched the execution mode */
6998 ctx
->base
.is_jmp
= DISAS_STOP
;
6999 register_name
= "TraceDBPC";
7000 goto cp0_unimplemented
;
7002 goto cp0_unimplemented
;
7005 case CP0_REGISTER_24
:
7007 case CP0_REG24__DEPC
:
7009 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7010 register_name
= "DEPC";
7013 goto cp0_unimplemented
;
7016 case CP0_REGISTER_25
:
7018 case CP0_REG25__PERFCTL0
:
7019 gen_helper_mtc0_performance0(cpu_env
, arg
);
7020 register_name
= "Performance0";
7022 case CP0_REG25__PERFCNT0
:
7023 /* gen_helper_mtc0_performance1(arg); */
7024 register_name
= "Performance1";
7025 goto cp0_unimplemented
;
7026 case CP0_REG25__PERFCTL1
:
7027 /* gen_helper_mtc0_performance2(arg); */
7028 register_name
= "Performance2";
7029 goto cp0_unimplemented
;
7030 case CP0_REG25__PERFCNT1
:
7031 /* gen_helper_mtc0_performance3(arg); */
7032 register_name
= "Performance3";
7033 goto cp0_unimplemented
;
7034 case CP0_REG25__PERFCTL2
:
7035 /* gen_helper_mtc0_performance4(arg); */
7036 register_name
= "Performance4";
7037 goto cp0_unimplemented
;
7038 case CP0_REG25__PERFCNT2
:
7039 /* gen_helper_mtc0_performance5(arg); */
7040 register_name
= "Performance5";
7041 goto cp0_unimplemented
;
7042 case CP0_REG25__PERFCTL3
:
7043 /* gen_helper_mtc0_performance6(arg); */
7044 register_name
= "Performance6";
7045 goto cp0_unimplemented
;
7046 case CP0_REG25__PERFCNT3
:
7047 /* gen_helper_mtc0_performance7(arg); */
7048 register_name
= "Performance7";
7049 goto cp0_unimplemented
;
7051 goto cp0_unimplemented
;
7054 case CP0_REGISTER_26
:
7056 case CP0_REG26__ERRCTL
:
7057 gen_helper_mtc0_errctl(cpu_env
, arg
);
7058 ctx
->base
.is_jmp
= DISAS_STOP
;
7059 register_name
= "ErrCtl";
7062 goto cp0_unimplemented
;
7065 case CP0_REGISTER_27
:
7067 case CP0_REG27__CACHERR
:
7069 register_name
= "CacheErr";
7072 goto cp0_unimplemented
;
7075 case CP0_REGISTER_28
:
7077 case CP0_REG28__TAGLO
:
7078 case CP0_REG28__TAGLO1
:
7079 case CP0_REG28__TAGLO2
:
7080 case CP0_REG28__TAGLO3
:
7081 gen_helper_mtc0_taglo(cpu_env
, arg
);
7082 register_name
= "TagLo";
7084 case CP0_REG28__DATALO
:
7085 case CP0_REG28__DATALO1
:
7086 case CP0_REG28__DATALO2
:
7087 case CP0_REG28__DATALO3
:
7088 gen_helper_mtc0_datalo(cpu_env
, arg
);
7089 register_name
= "DataLo";
7092 goto cp0_unimplemented
;
7095 case CP0_REGISTER_29
:
7097 case CP0_REG29__TAGHI
:
7098 case CP0_REG29__TAGHI1
:
7099 case CP0_REG29__TAGHI2
:
7100 case CP0_REG29__TAGHI3
:
7101 gen_helper_mtc0_taghi(cpu_env
, arg
);
7102 register_name
= "TagHi";
7104 case CP0_REG29__DATAHI
:
7105 case CP0_REG29__DATAHI1
:
7106 case CP0_REG29__DATAHI2
:
7107 case CP0_REG29__DATAHI3
:
7108 gen_helper_mtc0_datahi(cpu_env
, arg
);
7109 register_name
= "DataHi";
7112 register_name
= "invalid sel";
7113 goto cp0_unimplemented
;
7116 case CP0_REGISTER_30
:
7118 case CP0_REG30__ERROREPC
:
7119 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7120 register_name
= "ErrorEPC";
7123 goto cp0_unimplemented
;
7126 case CP0_REGISTER_31
:
7128 case CP0_REG31__DESAVE
:
7130 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7131 register_name
= "DESAVE";
7133 case CP0_REG31__KSCRATCH1
:
7134 case CP0_REG31__KSCRATCH2
:
7135 case CP0_REG31__KSCRATCH3
:
7136 case CP0_REG31__KSCRATCH4
:
7137 case CP0_REG31__KSCRATCH5
:
7138 case CP0_REG31__KSCRATCH6
:
7139 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7140 tcg_gen_st_tl(arg
, cpu_env
,
7141 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
7142 register_name
= "KScratch";
7145 goto cp0_unimplemented
;
7149 goto cp0_unimplemented
;
7151 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
7153 /* For simplicity assume that all writes can cause interrupts. */
7154 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7156 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7157 * translated code to check for pending interrupts.
7159 gen_save_pc(ctx
->base
.pc_next
+ 4);
7160 ctx
->base
.is_jmp
= DISAS_EXIT
;
7165 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
7166 register_name
, reg
, sel
);
7169 #if defined(TARGET_MIPS64)
7170 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7172 const char *register_name
= "invalid";
7175 check_insn(ctx
, ISA_MIPS_R1
);
7179 case CP0_REGISTER_00
:
7181 case CP0_REG00__INDEX
:
7182 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
7183 register_name
= "Index";
7185 case CP0_REG00__MVPCONTROL
:
7186 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7187 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
7188 register_name
= "MVPControl";
7190 case CP0_REG00__MVPCONF0
:
7191 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7192 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
7193 register_name
= "MVPConf0";
7195 case CP0_REG00__MVPCONF1
:
7196 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7197 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
7198 register_name
= "MVPConf1";
7200 case CP0_REG00__VPCONTROL
:
7202 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
7203 register_name
= "VPControl";
7206 goto cp0_unimplemented
;
7209 case CP0_REGISTER_01
:
7211 case CP0_REG01__RANDOM
:
7212 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
7213 gen_helper_mfc0_random(arg
, cpu_env
);
7214 register_name
= "Random";
7216 case CP0_REG01__VPECONTROL
:
7217 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7218 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
7219 register_name
= "VPEControl";
7221 case CP0_REG01__VPECONF0
:
7222 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7223 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
7224 register_name
= "VPEConf0";
7226 case CP0_REG01__VPECONF1
:
7227 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7228 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
7229 register_name
= "VPEConf1";
7231 case CP0_REG01__YQMASK
:
7232 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7233 tcg_gen_ld_tl(arg
, cpu_env
,
7234 offsetof(CPUMIPSState
, CP0_YQMask
));
7235 register_name
= "YQMask";
7237 case CP0_REG01__VPESCHEDULE
:
7238 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7239 tcg_gen_ld_tl(arg
, cpu_env
,
7240 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7241 register_name
= "VPESchedule";
7243 case CP0_REG01__VPESCHEFBACK
:
7244 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7245 tcg_gen_ld_tl(arg
, cpu_env
,
7246 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7247 register_name
= "VPEScheFBack";
7249 case CP0_REG01__VPEOPT
:
7250 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7251 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
7252 register_name
= "VPEOpt";
7255 goto cp0_unimplemented
;
7258 case CP0_REGISTER_02
:
7260 case CP0_REG02__ENTRYLO0
:
7261 tcg_gen_ld_tl(arg
, cpu_env
,
7262 offsetof(CPUMIPSState
, CP0_EntryLo0
));
7263 register_name
= "EntryLo0";
7265 case CP0_REG02__TCSTATUS
:
7266 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7267 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
7268 register_name
= "TCStatus";
7270 case CP0_REG02__TCBIND
:
7271 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7272 gen_helper_mfc0_tcbind(arg
, cpu_env
);
7273 register_name
= "TCBind";
7275 case CP0_REG02__TCRESTART
:
7276 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7277 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
7278 register_name
= "TCRestart";
7280 case CP0_REG02__TCHALT
:
7281 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7282 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
7283 register_name
= "TCHalt";
7285 case CP0_REG02__TCCONTEXT
:
7286 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7287 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
7288 register_name
= "TCContext";
7290 case CP0_REG02__TCSCHEDULE
:
7291 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7292 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
7293 register_name
= "TCSchedule";
7295 case CP0_REG02__TCSCHEFBACK
:
7296 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7297 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
7298 register_name
= "TCScheFBack";
7301 goto cp0_unimplemented
;
7304 case CP0_REGISTER_03
:
7306 case CP0_REG03__ENTRYLO1
:
7307 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
7308 register_name
= "EntryLo1";
7310 case CP0_REG03__GLOBALNUM
:
7312 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
7313 register_name
= "GlobalNumber";
7316 goto cp0_unimplemented
;
7319 case CP0_REGISTER_04
:
7321 case CP0_REG04__CONTEXT
:
7322 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
7323 register_name
= "Context";
7325 case CP0_REG04__CONTEXTCONFIG
:
7327 /* gen_helper_dmfc0_contextconfig(arg); */
7328 register_name
= "ContextConfig";
7329 goto cp0_unimplemented
;
7330 case CP0_REG04__USERLOCAL
:
7331 CP0_CHECK(ctx
->ulri
);
7332 tcg_gen_ld_tl(arg
, cpu_env
,
7333 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7334 register_name
= "UserLocal";
7336 case CP0_REG04__MMID
:
7338 gen_helper_mtc0_memorymapid(cpu_env
, arg
);
7339 register_name
= "MMID";
7342 goto cp0_unimplemented
;
7345 case CP0_REGISTER_05
:
7347 case CP0_REG05__PAGEMASK
:
7348 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
7349 register_name
= "PageMask";
7351 case CP0_REG05__PAGEGRAIN
:
7352 check_insn(ctx
, ISA_MIPS_R2
);
7353 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
7354 register_name
= "PageGrain";
7356 case CP0_REG05__SEGCTL0
:
7358 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
7359 register_name
= "SegCtl0";
7361 case CP0_REG05__SEGCTL1
:
7363 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
7364 register_name
= "SegCtl1";
7366 case CP0_REG05__SEGCTL2
:
7368 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
7369 register_name
= "SegCtl2";
7371 case CP0_REG05__PWBASE
:
7373 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
7374 register_name
= "PWBase";
7376 case CP0_REG05__PWFIELD
:
7378 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
7379 register_name
= "PWField";
7381 case CP0_REG05__PWSIZE
:
7383 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
7384 register_name
= "PWSize";
7387 goto cp0_unimplemented
;
7390 case CP0_REGISTER_06
:
7392 case CP0_REG06__WIRED
:
7393 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
7394 register_name
= "Wired";
7396 case CP0_REG06__SRSCONF0
:
7397 check_insn(ctx
, ISA_MIPS_R2
);
7398 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7399 register_name
= "SRSConf0";
7401 case CP0_REG06__SRSCONF1
:
7402 check_insn(ctx
, ISA_MIPS_R2
);
7403 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7404 register_name
= "SRSConf1";
7406 case CP0_REG06__SRSCONF2
:
7407 check_insn(ctx
, ISA_MIPS_R2
);
7408 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7409 register_name
= "SRSConf2";
7411 case CP0_REG06__SRSCONF3
:
7412 check_insn(ctx
, ISA_MIPS_R2
);
7413 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7414 register_name
= "SRSConf3";
7416 case CP0_REG06__SRSCONF4
:
7417 check_insn(ctx
, ISA_MIPS_R2
);
7418 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7419 register_name
= "SRSConf4";
7421 case CP0_REG06__PWCTL
:
7423 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7424 register_name
= "PWCtl";
7427 goto cp0_unimplemented
;
7430 case CP0_REGISTER_07
:
7432 case CP0_REG07__HWRENA
:
7433 check_insn(ctx
, ISA_MIPS_R2
);
7434 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7435 register_name
= "HWREna";
7438 goto cp0_unimplemented
;
7441 case CP0_REGISTER_08
:
7443 case CP0_REG08__BADVADDR
:
7444 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7445 register_name
= "BadVAddr";
7447 case CP0_REG08__BADINSTR
:
7449 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7450 register_name
= "BadInstr";
7452 case CP0_REG08__BADINSTRP
:
7454 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7455 register_name
= "BadInstrP";
7457 case CP0_REG08__BADINSTRX
:
7459 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7460 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7461 register_name
= "BadInstrX";
7464 goto cp0_unimplemented
;
7467 case CP0_REGISTER_09
:
7469 case CP0_REG09__COUNT
:
7470 /* Mark as an IO operation because we read the time. */
7471 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7474 gen_helper_mfc0_count(arg
, cpu_env
);
7476 * Break the TB to be able to take timer interrupts immediately
7477 * after reading count. DISAS_STOP isn't sufficient, we need to
7478 * ensure we break completely out of translated code.
7480 gen_save_pc(ctx
->base
.pc_next
+ 4);
7481 ctx
->base
.is_jmp
= DISAS_EXIT
;
7482 register_name
= "Count";
7484 case CP0_REG09__SAARI
:
7485 CP0_CHECK(ctx
->saar
);
7486 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7487 register_name
= "SAARI";
7489 case CP0_REG09__SAAR
:
7490 CP0_CHECK(ctx
->saar
);
7491 gen_helper_dmfc0_saar(arg
, cpu_env
);
7492 register_name
= "SAAR";
7495 goto cp0_unimplemented
;
7498 case CP0_REGISTER_10
:
7500 case CP0_REG10__ENTRYHI
:
7501 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7502 register_name
= "EntryHi";
7505 goto cp0_unimplemented
;
7508 case CP0_REGISTER_11
:
7510 case CP0_REG11__COMPARE
:
7511 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7512 register_name
= "Compare";
7514 /* 6,7 are implementation dependent */
7516 goto cp0_unimplemented
;
7519 case CP0_REGISTER_12
:
7521 case CP0_REG12__STATUS
:
7522 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7523 register_name
= "Status";
7525 case CP0_REG12__INTCTL
:
7526 check_insn(ctx
, ISA_MIPS_R2
);
7527 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7528 register_name
= "IntCtl";
7530 case CP0_REG12__SRSCTL
:
7531 check_insn(ctx
, ISA_MIPS_R2
);
7532 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7533 register_name
= "SRSCtl";
7535 case CP0_REG12__SRSMAP
:
7536 check_insn(ctx
, ISA_MIPS_R2
);
7537 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7538 register_name
= "SRSMap";
7541 goto cp0_unimplemented
;
7544 case CP0_REGISTER_13
:
7546 case CP0_REG13__CAUSE
:
7547 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7548 register_name
= "Cause";
7551 goto cp0_unimplemented
;
7554 case CP0_REGISTER_14
:
7556 case CP0_REG14__EPC
:
7557 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7558 register_name
= "EPC";
7561 goto cp0_unimplemented
;
7564 case CP0_REGISTER_15
:
7566 case CP0_REG15__PRID
:
7567 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7568 register_name
= "PRid";
7570 case CP0_REG15__EBASE
:
7571 check_insn(ctx
, ISA_MIPS_R2
);
7572 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7573 register_name
= "EBase";
7575 case CP0_REG15__CMGCRBASE
:
7576 check_insn(ctx
, ISA_MIPS_R2
);
7577 CP0_CHECK(ctx
->cmgcr
);
7578 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7579 register_name
= "CMGCRBase";
7582 goto cp0_unimplemented
;
7585 case CP0_REGISTER_16
:
7587 case CP0_REG16__CONFIG
:
7588 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7589 register_name
= "Config";
7591 case CP0_REG16__CONFIG1
:
7592 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7593 register_name
= "Config1";
7595 case CP0_REG16__CONFIG2
:
7596 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7597 register_name
= "Config2";
7599 case CP0_REG16__CONFIG3
:
7600 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7601 register_name
= "Config3";
7603 case CP0_REG16__CONFIG4
:
7604 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7605 register_name
= "Config4";
7607 case CP0_REG16__CONFIG5
:
7608 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7609 register_name
= "Config5";
7611 /* 6,7 are implementation dependent */
7612 case CP0_REG16__CONFIG6
:
7613 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7614 register_name
= "Config6";
7616 case CP0_REG16__CONFIG7
:
7617 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7618 register_name
= "Config7";
7621 goto cp0_unimplemented
;
7624 case CP0_REGISTER_17
:
7626 case CP0_REG17__LLADDR
:
7627 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
7628 register_name
= "LLAddr";
7630 case CP0_REG17__MAAR
:
7631 CP0_CHECK(ctx
->mrp
);
7632 gen_helper_dmfc0_maar(arg
, cpu_env
);
7633 register_name
= "MAAR";
7635 case CP0_REG17__MAARI
:
7636 CP0_CHECK(ctx
->mrp
);
7637 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7638 register_name
= "MAARI";
7641 goto cp0_unimplemented
;
7644 case CP0_REGISTER_18
:
7646 case CP0_REG18__WATCHLO0
:
7647 case CP0_REG18__WATCHLO1
:
7648 case CP0_REG18__WATCHLO2
:
7649 case CP0_REG18__WATCHLO3
:
7650 case CP0_REG18__WATCHLO4
:
7651 case CP0_REG18__WATCHLO5
:
7652 case CP0_REG18__WATCHLO6
:
7653 case CP0_REG18__WATCHLO7
:
7654 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7655 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
7656 register_name
= "WatchLo";
7659 goto cp0_unimplemented
;
7662 case CP0_REGISTER_19
:
7664 case CP0_REG19__WATCHHI0
:
7665 case CP0_REG19__WATCHHI1
:
7666 case CP0_REG19__WATCHHI2
:
7667 case CP0_REG19__WATCHHI3
:
7668 case CP0_REG19__WATCHHI4
:
7669 case CP0_REG19__WATCHHI5
:
7670 case CP0_REG19__WATCHHI6
:
7671 case CP0_REG19__WATCHHI7
:
7672 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7673 gen_helper_1e0i(dmfc0_watchhi
, arg
, sel
);
7674 register_name
= "WatchHi";
7677 goto cp0_unimplemented
;
7680 case CP0_REGISTER_20
:
7682 case CP0_REG20__XCONTEXT
:
7683 check_insn(ctx
, ISA_MIPS3
);
7684 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7685 register_name
= "XContext";
7688 goto cp0_unimplemented
;
7691 case CP0_REGISTER_21
:
7692 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7693 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
7696 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7697 register_name
= "Framemask";
7700 goto cp0_unimplemented
;
7703 case CP0_REGISTER_22
:
7704 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7705 register_name
= "'Diagnostic"; /* implementation dependent */
7707 case CP0_REGISTER_23
:
7709 case CP0_REG23__DEBUG
:
7710 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7711 register_name
= "Debug";
7713 case CP0_REG23__TRACECONTROL
:
7714 /* PDtrace support */
7715 /* gen_helper_dmfc0_tracecontrol(arg, cpu_env); */
7716 register_name
= "TraceControl";
7717 goto cp0_unimplemented
;
7718 case CP0_REG23__TRACECONTROL2
:
7719 /* PDtrace support */
7720 /* gen_helper_dmfc0_tracecontrol2(arg, cpu_env); */
7721 register_name
= "TraceControl2";
7722 goto cp0_unimplemented
;
7723 case CP0_REG23__USERTRACEDATA1
:
7724 /* PDtrace support */
7725 /* gen_helper_dmfc0_usertracedata1(arg, cpu_env);*/
7726 register_name
= "UserTraceData1";
7727 goto cp0_unimplemented
;
7728 case CP0_REG23__TRACEIBPC
:
7729 /* PDtrace support */
7730 /* gen_helper_dmfc0_traceibpc(arg, cpu_env); */
7731 register_name
= "TraceIBPC";
7732 goto cp0_unimplemented
;
7733 case CP0_REG23__TRACEDBPC
:
7734 /* PDtrace support */
7735 /* gen_helper_dmfc0_tracedbpc(arg, cpu_env); */
7736 register_name
= "TraceDBPC";
7737 goto cp0_unimplemented
;
7739 goto cp0_unimplemented
;
7742 case CP0_REGISTER_24
:
7744 case CP0_REG24__DEPC
:
7746 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7747 register_name
= "DEPC";
7750 goto cp0_unimplemented
;
7753 case CP0_REGISTER_25
:
7755 case CP0_REG25__PERFCTL0
:
7756 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7757 register_name
= "Performance0";
7759 case CP0_REG25__PERFCNT0
:
7760 /* gen_helper_dmfc0_performance1(arg); */
7761 register_name
= "Performance1";
7762 goto cp0_unimplemented
;
7763 case CP0_REG25__PERFCTL1
:
7764 /* gen_helper_dmfc0_performance2(arg); */
7765 register_name
= "Performance2";
7766 goto cp0_unimplemented
;
7767 case CP0_REG25__PERFCNT1
:
7768 /* gen_helper_dmfc0_performance3(arg); */
7769 register_name
= "Performance3";
7770 goto cp0_unimplemented
;
7771 case CP0_REG25__PERFCTL2
:
7772 /* gen_helper_dmfc0_performance4(arg); */
7773 register_name
= "Performance4";
7774 goto cp0_unimplemented
;
7775 case CP0_REG25__PERFCNT2
:
7776 /* gen_helper_dmfc0_performance5(arg); */
7777 register_name
= "Performance5";
7778 goto cp0_unimplemented
;
7779 case CP0_REG25__PERFCTL3
:
7780 /* gen_helper_dmfc0_performance6(arg); */
7781 register_name
= "Performance6";
7782 goto cp0_unimplemented
;
7783 case CP0_REG25__PERFCNT3
:
7784 /* gen_helper_dmfc0_performance7(arg); */
7785 register_name
= "Performance7";
7786 goto cp0_unimplemented
;
7788 goto cp0_unimplemented
;
7791 case CP0_REGISTER_26
:
7793 case CP0_REG26__ERRCTL
:
7794 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7795 register_name
= "ErrCtl";
7798 goto cp0_unimplemented
;
7801 case CP0_REGISTER_27
:
7804 case CP0_REG27__CACHERR
:
7805 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7806 register_name
= "CacheErr";
7809 goto cp0_unimplemented
;
7812 case CP0_REGISTER_28
:
7814 case CP0_REG28__TAGLO
:
7815 case CP0_REG28__TAGLO1
:
7816 case CP0_REG28__TAGLO2
:
7817 case CP0_REG28__TAGLO3
:
7818 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
7819 register_name
= "TagLo";
7821 case CP0_REG28__DATALO
:
7822 case CP0_REG28__DATALO1
:
7823 case CP0_REG28__DATALO2
:
7824 case CP0_REG28__DATALO3
:
7825 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7826 register_name
= "DataLo";
7829 goto cp0_unimplemented
;
7832 case CP0_REGISTER_29
:
7834 case CP0_REG29__TAGHI
:
7835 case CP0_REG29__TAGHI1
:
7836 case CP0_REG29__TAGHI2
:
7837 case CP0_REG29__TAGHI3
:
7838 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7839 register_name
= "TagHi";
7841 case CP0_REG29__DATAHI
:
7842 case CP0_REG29__DATAHI1
:
7843 case CP0_REG29__DATAHI2
:
7844 case CP0_REG29__DATAHI3
:
7845 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7846 register_name
= "DataHi";
7849 goto cp0_unimplemented
;
7852 case CP0_REGISTER_30
:
7854 case CP0_REG30__ERROREPC
:
7855 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7856 register_name
= "ErrorEPC";
7859 goto cp0_unimplemented
;
7862 case CP0_REGISTER_31
:
7864 case CP0_REG31__DESAVE
:
7866 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7867 register_name
= "DESAVE";
7869 case CP0_REG31__KSCRATCH1
:
7870 case CP0_REG31__KSCRATCH2
:
7871 case CP0_REG31__KSCRATCH3
:
7872 case CP0_REG31__KSCRATCH4
:
7873 case CP0_REG31__KSCRATCH5
:
7874 case CP0_REG31__KSCRATCH6
:
7875 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7876 tcg_gen_ld_tl(arg
, cpu_env
,
7877 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
7878 register_name
= "KScratch";
7881 goto cp0_unimplemented
;
7885 goto cp0_unimplemented
;
7887 trace_mips_translate_c0("dmfc0", register_name
, reg
, sel
);
7891 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n",
7892 register_name
, reg
, sel
);
7893 gen_mfc0_unimplemented(ctx
, arg
);
7896 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7898 const char *register_name
= "invalid";
7901 check_insn(ctx
, ISA_MIPS_R1
);
7904 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7909 case CP0_REGISTER_00
:
7911 case CP0_REG00__INDEX
:
7912 gen_helper_mtc0_index(cpu_env
, arg
);
7913 register_name
= "Index";
7915 case CP0_REG00__MVPCONTROL
:
7916 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7917 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7918 register_name
= "MVPControl";
7920 case CP0_REG00__MVPCONF0
:
7921 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7923 register_name
= "MVPConf0";
7925 case CP0_REG00__MVPCONF1
:
7926 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7928 register_name
= "MVPConf1";
7930 case CP0_REG00__VPCONTROL
:
7933 register_name
= "VPControl";
7936 goto cp0_unimplemented
;
7939 case CP0_REGISTER_01
:
7941 case CP0_REG01__RANDOM
:
7943 register_name
= "Random";
7945 case CP0_REG01__VPECONTROL
:
7946 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7947 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7948 register_name
= "VPEControl";
7950 case CP0_REG01__VPECONF0
:
7951 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7952 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7953 register_name
= "VPEConf0";
7955 case CP0_REG01__VPECONF1
:
7956 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7957 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7958 register_name
= "VPEConf1";
7960 case CP0_REG01__YQMASK
:
7961 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7962 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7963 register_name
= "YQMask";
7965 case CP0_REG01__VPESCHEDULE
:
7966 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7967 tcg_gen_st_tl(arg
, cpu_env
,
7968 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7969 register_name
= "VPESchedule";
7971 case CP0_REG01__VPESCHEFBACK
:
7972 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7973 tcg_gen_st_tl(arg
, cpu_env
,
7974 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7975 register_name
= "VPEScheFBack";
7977 case CP0_REG01__VPEOPT
:
7978 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7979 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7980 register_name
= "VPEOpt";
7983 goto cp0_unimplemented
;
7986 case CP0_REGISTER_02
:
7988 case CP0_REG02__ENTRYLO0
:
7989 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
7990 register_name
= "EntryLo0";
7992 case CP0_REG02__TCSTATUS
:
7993 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7994 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7995 register_name
= "TCStatus";
7997 case CP0_REG02__TCBIND
:
7998 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7999 gen_helper_mtc0_tcbind(cpu_env
, arg
);
8000 register_name
= "TCBind";
8002 case CP0_REG02__TCRESTART
:
8003 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8004 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
8005 register_name
= "TCRestart";
8007 case CP0_REG02__TCHALT
:
8008 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8009 gen_helper_mtc0_tchalt(cpu_env
, arg
);
8010 register_name
= "TCHalt";
8012 case CP0_REG02__TCCONTEXT
:
8013 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8014 gen_helper_mtc0_tccontext(cpu_env
, arg
);
8015 register_name
= "TCContext";
8017 case CP0_REG02__TCSCHEDULE
:
8018 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8019 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
8020 register_name
= "TCSchedule";
8022 case CP0_REG02__TCSCHEFBACK
:
8023 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8024 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
8025 register_name
= "TCScheFBack";
8028 goto cp0_unimplemented
;
8031 case CP0_REGISTER_03
:
8033 case CP0_REG03__ENTRYLO1
:
8034 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
8035 register_name
= "EntryLo1";
8037 case CP0_REG03__GLOBALNUM
:
8040 register_name
= "GlobalNumber";
8043 goto cp0_unimplemented
;
8046 case CP0_REGISTER_04
:
8048 case CP0_REG04__CONTEXT
:
8049 gen_helper_mtc0_context(cpu_env
, arg
);
8050 register_name
= "Context";
8052 case CP0_REG04__CONTEXTCONFIG
:
8054 /* gen_helper_dmtc0_contextconfig(arg); */
8055 register_name
= "ContextConfig";
8056 goto cp0_unimplemented
;
8057 case CP0_REG04__USERLOCAL
:
8058 CP0_CHECK(ctx
->ulri
);
8059 tcg_gen_st_tl(arg
, cpu_env
,
8060 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8061 register_name
= "UserLocal";
8063 case CP0_REG04__MMID
:
8065 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MemoryMapID
));
8066 register_name
= "MMID";
8069 goto cp0_unimplemented
;
8072 case CP0_REGISTER_05
:
8074 case CP0_REG05__PAGEMASK
:
8075 gen_helper_mtc0_pagemask(cpu_env
, arg
);
8076 register_name
= "PageMask";
8078 case CP0_REG05__PAGEGRAIN
:
8079 check_insn(ctx
, ISA_MIPS_R2
);
8080 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
8081 register_name
= "PageGrain";
8083 case CP0_REG05__SEGCTL0
:
8085 gen_helper_mtc0_segctl0(cpu_env
, arg
);
8086 register_name
= "SegCtl0";
8088 case CP0_REG05__SEGCTL1
:
8090 gen_helper_mtc0_segctl1(cpu_env
, arg
);
8091 register_name
= "SegCtl1";
8093 case CP0_REG05__SEGCTL2
:
8095 gen_helper_mtc0_segctl2(cpu_env
, arg
);
8096 register_name
= "SegCtl2";
8098 case CP0_REG05__PWBASE
:
8100 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8101 register_name
= "PWBase";
8103 case CP0_REG05__PWFIELD
:
8105 gen_helper_mtc0_pwfield(cpu_env
, arg
);
8106 register_name
= "PWField";
8108 case CP0_REG05__PWSIZE
:
8110 gen_helper_mtc0_pwsize(cpu_env
, arg
);
8111 register_name
= "PWSize";
8114 goto cp0_unimplemented
;
8117 case CP0_REGISTER_06
:
8119 case CP0_REG06__WIRED
:
8120 gen_helper_mtc0_wired(cpu_env
, arg
);
8121 register_name
= "Wired";
8123 case CP0_REG06__SRSCONF0
:
8124 check_insn(ctx
, ISA_MIPS_R2
);
8125 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
8126 register_name
= "SRSConf0";
8128 case CP0_REG06__SRSCONF1
:
8129 check_insn(ctx
, ISA_MIPS_R2
);
8130 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
8131 register_name
= "SRSConf1";
8133 case CP0_REG06__SRSCONF2
:
8134 check_insn(ctx
, ISA_MIPS_R2
);
8135 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
8136 register_name
= "SRSConf2";
8138 case CP0_REG06__SRSCONF3
:
8139 check_insn(ctx
, ISA_MIPS_R2
);
8140 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
8141 register_name
= "SRSConf3";
8143 case CP0_REG06__SRSCONF4
:
8144 check_insn(ctx
, ISA_MIPS_R2
);
8145 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
8146 register_name
= "SRSConf4";
8148 case CP0_REG06__PWCTL
:
8150 gen_helper_mtc0_pwctl(cpu_env
, arg
);
8151 register_name
= "PWCtl";
8154 goto cp0_unimplemented
;
8157 case CP0_REGISTER_07
:
8159 case CP0_REG07__HWRENA
:
8160 check_insn(ctx
, ISA_MIPS_R2
);
8161 gen_helper_mtc0_hwrena(cpu_env
, arg
);
8162 ctx
->base
.is_jmp
= DISAS_STOP
;
8163 register_name
= "HWREna";
8166 goto cp0_unimplemented
;
8169 case CP0_REGISTER_08
:
8171 case CP0_REG08__BADVADDR
:
8173 register_name
= "BadVAddr";
8175 case CP0_REG08__BADINSTR
:
8177 register_name
= "BadInstr";
8179 case CP0_REG08__BADINSTRP
:
8181 register_name
= "BadInstrP";
8183 case CP0_REG08__BADINSTRX
:
8185 register_name
= "BadInstrX";
8188 goto cp0_unimplemented
;
8191 case CP0_REGISTER_09
:
8193 case CP0_REG09__COUNT
:
8194 gen_helper_mtc0_count(cpu_env
, arg
);
8195 register_name
= "Count";
8197 case CP0_REG09__SAARI
:
8198 CP0_CHECK(ctx
->saar
);
8199 gen_helper_mtc0_saari(cpu_env
, arg
);
8200 register_name
= "SAARI";
8202 case CP0_REG09__SAAR
:
8203 CP0_CHECK(ctx
->saar
);
8204 gen_helper_mtc0_saar(cpu_env
, arg
);
8205 register_name
= "SAAR";
8208 goto cp0_unimplemented
;
8210 /* Stop translation as we may have switched the execution mode */
8211 ctx
->base
.is_jmp
= DISAS_STOP
;
8213 case CP0_REGISTER_10
:
8215 case CP0_REG10__ENTRYHI
:
8216 gen_helper_mtc0_entryhi(cpu_env
, arg
);
8217 register_name
= "EntryHi";
8220 goto cp0_unimplemented
;
8223 case CP0_REGISTER_11
:
8225 case CP0_REG11__COMPARE
:
8226 gen_helper_mtc0_compare(cpu_env
, arg
);
8227 register_name
= "Compare";
8229 /* 6,7 are implementation dependent */
8231 goto cp0_unimplemented
;
8233 /* Stop translation as we may have switched the execution mode */
8234 ctx
->base
.is_jmp
= DISAS_STOP
;
8236 case CP0_REGISTER_12
:
8238 case CP0_REG12__STATUS
:
8239 save_cpu_state(ctx
, 1);
8240 gen_helper_mtc0_status(cpu_env
, arg
);
8241 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8242 gen_save_pc(ctx
->base
.pc_next
+ 4);
8243 ctx
->base
.is_jmp
= DISAS_EXIT
;
8244 register_name
= "Status";
8246 case CP0_REG12__INTCTL
:
8247 check_insn(ctx
, ISA_MIPS_R2
);
8248 gen_helper_mtc0_intctl(cpu_env
, arg
);
8249 /* Stop translation as we may have switched the execution mode */
8250 ctx
->base
.is_jmp
= DISAS_STOP
;
8251 register_name
= "IntCtl";
8253 case CP0_REG12__SRSCTL
:
8254 check_insn(ctx
, ISA_MIPS_R2
);
8255 gen_helper_mtc0_srsctl(cpu_env
, arg
);
8256 /* Stop translation as we may have switched the execution mode */
8257 ctx
->base
.is_jmp
= DISAS_STOP
;
8258 register_name
= "SRSCtl";
8260 case CP0_REG12__SRSMAP
:
8261 check_insn(ctx
, ISA_MIPS_R2
);
8262 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8263 /* Stop translation as we may have switched the execution mode */
8264 ctx
->base
.is_jmp
= DISAS_STOP
;
8265 register_name
= "SRSMap";
8268 goto cp0_unimplemented
;
8271 case CP0_REGISTER_13
:
8273 case CP0_REG13__CAUSE
:
8274 save_cpu_state(ctx
, 1);
8275 gen_helper_mtc0_cause(cpu_env
, arg
);
8277 * Stop translation as we may have triggered an interrupt.
8278 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8279 * translated code to check for pending interrupts.
8281 gen_save_pc(ctx
->base
.pc_next
+ 4);
8282 ctx
->base
.is_jmp
= DISAS_EXIT
;
8283 register_name
= "Cause";
8286 goto cp0_unimplemented
;
8289 case CP0_REGISTER_14
:
8291 case CP0_REG14__EPC
:
8292 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8293 register_name
= "EPC";
8296 goto cp0_unimplemented
;
8299 case CP0_REGISTER_15
:
8301 case CP0_REG15__PRID
:
8303 register_name
= "PRid";
8305 case CP0_REG15__EBASE
:
8306 check_insn(ctx
, ISA_MIPS_R2
);
8307 gen_helper_mtc0_ebase(cpu_env
, arg
);
8308 register_name
= "EBase";
8311 goto cp0_unimplemented
;
8314 case CP0_REGISTER_16
:
8316 case CP0_REG16__CONFIG
:
8317 gen_helper_mtc0_config0(cpu_env
, arg
);
8318 register_name
= "Config";
8319 /* Stop translation as we may have switched the execution mode */
8320 ctx
->base
.is_jmp
= DISAS_STOP
;
8322 case CP0_REG16__CONFIG1
:
8323 /* ignored, read only */
8324 register_name
= "Config1";
8326 case CP0_REG16__CONFIG2
:
8327 gen_helper_mtc0_config2(cpu_env
, arg
);
8328 register_name
= "Config2";
8329 /* Stop translation as we may have switched the execution mode */
8330 ctx
->base
.is_jmp
= DISAS_STOP
;
8332 case CP0_REG16__CONFIG3
:
8333 gen_helper_mtc0_config3(cpu_env
, arg
);
8334 register_name
= "Config3";
8335 /* Stop translation as we may have switched the execution mode */
8336 ctx
->base
.is_jmp
= DISAS_STOP
;
8338 case CP0_REG16__CONFIG4
:
8339 /* currently ignored */
8340 register_name
= "Config4";
8342 case CP0_REG16__CONFIG5
:
8343 gen_helper_mtc0_config5(cpu_env
, arg
);
8344 register_name
= "Config5";
8345 /* Stop translation as we may have switched the execution mode */
8346 ctx
->base
.is_jmp
= DISAS_STOP
;
8348 /* 6,7 are implementation dependent */
8350 register_name
= "Invalid config selector";
8351 goto cp0_unimplemented
;
8354 case CP0_REGISTER_17
:
8356 case CP0_REG17__LLADDR
:
8357 gen_helper_mtc0_lladdr(cpu_env
, arg
);
8358 register_name
= "LLAddr";
8360 case CP0_REG17__MAAR
:
8361 CP0_CHECK(ctx
->mrp
);
8362 gen_helper_mtc0_maar(cpu_env
, arg
);
8363 register_name
= "MAAR";
8365 case CP0_REG17__MAARI
:
8366 CP0_CHECK(ctx
->mrp
);
8367 gen_helper_mtc0_maari(cpu_env
, arg
);
8368 register_name
= "MAARI";
8371 goto cp0_unimplemented
;
8374 case CP0_REGISTER_18
:
8376 case CP0_REG18__WATCHLO0
:
8377 case CP0_REG18__WATCHLO1
:
8378 case CP0_REG18__WATCHLO2
:
8379 case CP0_REG18__WATCHLO3
:
8380 case CP0_REG18__WATCHLO4
:
8381 case CP0_REG18__WATCHLO5
:
8382 case CP0_REG18__WATCHLO6
:
8383 case CP0_REG18__WATCHLO7
:
8384 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8385 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
8386 register_name
= "WatchLo";
8389 goto cp0_unimplemented
;
8392 case CP0_REGISTER_19
:
8394 case CP0_REG19__WATCHHI0
:
8395 case CP0_REG19__WATCHHI1
:
8396 case CP0_REG19__WATCHHI2
:
8397 case CP0_REG19__WATCHHI3
:
8398 case CP0_REG19__WATCHHI4
:
8399 case CP0_REG19__WATCHHI5
:
8400 case CP0_REG19__WATCHHI6
:
8401 case CP0_REG19__WATCHHI7
:
8402 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8403 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8404 register_name
= "WatchHi";
8407 goto cp0_unimplemented
;
8410 case CP0_REGISTER_20
:
8412 case CP0_REG20__XCONTEXT
:
8413 check_insn(ctx
, ISA_MIPS3
);
8414 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8415 register_name
= "XContext";
8418 goto cp0_unimplemented
;
8421 case CP0_REGISTER_21
:
8422 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8423 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
8426 gen_helper_mtc0_framemask(cpu_env
, arg
);
8427 register_name
= "Framemask";
8430 goto cp0_unimplemented
;
8433 case CP0_REGISTER_22
:
8435 register_name
= "Diagnostic"; /* implementation dependent */
8437 case CP0_REGISTER_23
:
8439 case CP0_REG23__DEBUG
:
8440 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8441 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8442 gen_save_pc(ctx
->base
.pc_next
+ 4);
8443 ctx
->base
.is_jmp
= DISAS_EXIT
;
8444 register_name
= "Debug";
8446 case CP0_REG23__TRACECONTROL
:
8447 /* PDtrace support */
8448 /* gen_helper_mtc0_tracecontrol(cpu_env, arg); */
8449 /* Stop translation as we may have switched the execution mode */
8450 ctx
->base
.is_jmp
= DISAS_STOP
;
8451 register_name
= "TraceControl";
8452 goto cp0_unimplemented
;
8453 case CP0_REG23__TRACECONTROL2
:
8454 /* PDtrace support */
8455 /* gen_helper_mtc0_tracecontrol2(cpu_env, arg); */
8456 /* Stop translation as we may have switched the execution mode */
8457 ctx
->base
.is_jmp
= DISAS_STOP
;
8458 register_name
= "TraceControl2";
8459 goto cp0_unimplemented
;
8460 case CP0_REG23__USERTRACEDATA1
:
8461 /* PDtrace support */
8462 /* gen_helper_mtc0_usertracedata1(cpu_env, arg);*/
8463 /* Stop translation as we may have switched the execution mode */
8464 ctx
->base
.is_jmp
= DISAS_STOP
;
8465 register_name
= "UserTraceData1";
8466 goto cp0_unimplemented
;
8467 case CP0_REG23__TRACEIBPC
:
8468 /* PDtrace support */
8469 /* gen_helper_mtc0_traceibpc(cpu_env, arg); */
8470 /* Stop translation as we may have switched the execution mode */
8471 ctx
->base
.is_jmp
= DISAS_STOP
;
8472 register_name
= "TraceIBPC";
8473 goto cp0_unimplemented
;
8474 case CP0_REG23__TRACEDBPC
:
8475 /* PDtrace support */
8476 /* gen_helper_mtc0_tracedbpc(cpu_env, arg); */
8477 /* Stop translation as we may have switched the execution mode */
8478 ctx
->base
.is_jmp
= DISAS_STOP
;
8479 register_name
= "TraceDBPC";
8480 goto cp0_unimplemented
;
8482 goto cp0_unimplemented
;
8485 case CP0_REGISTER_24
:
8487 case CP0_REG24__DEPC
:
8489 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8490 register_name
= "DEPC";
8493 goto cp0_unimplemented
;
8496 case CP0_REGISTER_25
:
8498 case CP0_REG25__PERFCTL0
:
8499 gen_helper_mtc0_performance0(cpu_env
, arg
);
8500 register_name
= "Performance0";
8502 case CP0_REG25__PERFCNT0
:
8503 /* gen_helper_mtc0_performance1(cpu_env, arg); */
8504 register_name
= "Performance1";
8505 goto cp0_unimplemented
;
8506 case CP0_REG25__PERFCTL1
:
8507 /* gen_helper_mtc0_performance2(cpu_env, arg); */
8508 register_name
= "Performance2";
8509 goto cp0_unimplemented
;
8510 case CP0_REG25__PERFCNT1
:
8511 /* gen_helper_mtc0_performance3(cpu_env, arg); */
8512 register_name
= "Performance3";
8513 goto cp0_unimplemented
;
8514 case CP0_REG25__PERFCTL2
:
8515 /* gen_helper_mtc0_performance4(cpu_env, arg); */
8516 register_name
= "Performance4";
8517 goto cp0_unimplemented
;
8518 case CP0_REG25__PERFCNT2
:
8519 /* gen_helper_mtc0_performance5(cpu_env, arg); */
8520 register_name
= "Performance5";
8521 goto cp0_unimplemented
;
8522 case CP0_REG25__PERFCTL3
:
8523 /* gen_helper_mtc0_performance6(cpu_env, arg); */
8524 register_name
= "Performance6";
8525 goto cp0_unimplemented
;
8526 case CP0_REG25__PERFCNT3
:
8527 /* gen_helper_mtc0_performance7(cpu_env, arg); */
8528 register_name
= "Performance7";
8529 goto cp0_unimplemented
;
8531 goto cp0_unimplemented
;
8534 case CP0_REGISTER_26
:
8536 case CP0_REG26__ERRCTL
:
8537 gen_helper_mtc0_errctl(cpu_env
, arg
);
8538 ctx
->base
.is_jmp
= DISAS_STOP
;
8539 register_name
= "ErrCtl";
8542 goto cp0_unimplemented
;
8545 case CP0_REGISTER_27
:
8547 case CP0_REG27__CACHERR
:
8549 register_name
= "CacheErr";
8552 goto cp0_unimplemented
;
8555 case CP0_REGISTER_28
:
8557 case CP0_REG28__TAGLO
:
8558 case CP0_REG28__TAGLO1
:
8559 case CP0_REG28__TAGLO2
:
8560 case CP0_REG28__TAGLO3
:
8561 gen_helper_mtc0_taglo(cpu_env
, arg
);
8562 register_name
= "TagLo";
8564 case CP0_REG28__DATALO
:
8565 case CP0_REG28__DATALO1
:
8566 case CP0_REG28__DATALO2
:
8567 case CP0_REG28__DATALO3
:
8568 gen_helper_mtc0_datalo(cpu_env
, arg
);
8569 register_name
= "DataLo";
8572 goto cp0_unimplemented
;
8575 case CP0_REGISTER_29
:
8577 case CP0_REG29__TAGHI
:
8578 case CP0_REG29__TAGHI1
:
8579 case CP0_REG29__TAGHI2
:
8580 case CP0_REG29__TAGHI3
:
8581 gen_helper_mtc0_taghi(cpu_env
, arg
);
8582 register_name
= "TagHi";
8584 case CP0_REG29__DATAHI
:
8585 case CP0_REG29__DATAHI1
:
8586 case CP0_REG29__DATAHI2
:
8587 case CP0_REG29__DATAHI3
:
8588 gen_helper_mtc0_datahi(cpu_env
, arg
);
8589 register_name
= "DataHi";
8592 register_name
= "invalid sel";
8593 goto cp0_unimplemented
;
8596 case CP0_REGISTER_30
:
8598 case CP0_REG30__ERROREPC
:
8599 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8600 register_name
= "ErrorEPC";
8603 goto cp0_unimplemented
;
8606 case CP0_REGISTER_31
:
8608 case CP0_REG31__DESAVE
:
8610 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8611 register_name
= "DESAVE";
8613 case CP0_REG31__KSCRATCH1
:
8614 case CP0_REG31__KSCRATCH2
:
8615 case CP0_REG31__KSCRATCH3
:
8616 case CP0_REG31__KSCRATCH4
:
8617 case CP0_REG31__KSCRATCH5
:
8618 case CP0_REG31__KSCRATCH6
:
8619 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8620 tcg_gen_st_tl(arg
, cpu_env
,
8621 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
8622 register_name
= "KScratch";
8625 goto cp0_unimplemented
;
8629 goto cp0_unimplemented
;
8631 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
8633 /* For simplicity assume that all writes can cause interrupts. */
8634 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8636 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8637 * translated code to check for pending interrupts.
8639 gen_save_pc(ctx
->base
.pc_next
+ 4);
8640 ctx
->base
.is_jmp
= DISAS_EXIT
;
8645 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
8646 register_name
, reg
, sel
);
8648 #endif /* TARGET_MIPS64 */
8650 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
8651 int u
, int sel
, int h
)
8653 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8654 TCGv t0
= tcg_temp_local_new();
8656 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8657 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8658 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
8659 tcg_gen_movi_tl(t0
, -1);
8660 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8661 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
8662 tcg_gen_movi_tl(t0
, -1);
8663 } else if (u
== 0) {
8668 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
8671 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
8681 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
8684 gen_helper_mftc0_tcbind(t0
, cpu_env
);
8687 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
8690 gen_helper_mftc0_tchalt(t0
, cpu_env
);
8693 gen_helper_mftc0_tccontext(t0
, cpu_env
);
8696 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
8699 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
8702 gen_mfc0(ctx
, t0
, rt
, sel
);
8709 gen_helper_mftc0_entryhi(t0
, cpu_env
);
8712 gen_mfc0(ctx
, t0
, rt
, sel
);
8719 gen_helper_mftc0_status(t0
, cpu_env
);
8722 gen_mfc0(ctx
, t0
, rt
, sel
);
8729 gen_helper_mftc0_cause(t0
, cpu_env
);
8739 gen_helper_mftc0_epc(t0
, cpu_env
);
8749 gen_helper_mftc0_ebase(t0
, cpu_env
);
8766 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
8776 gen_helper_mftc0_debug(t0
, cpu_env
);
8779 gen_mfc0(ctx
, t0
, rt
, sel
);
8784 gen_mfc0(ctx
, t0
, rt
, sel
);
8788 /* GPR registers. */
8790 gen_helper_1e0i(mftgpr
, t0
, rt
);
8792 /* Auxiliary CPU registers */
8796 gen_helper_1e0i(mftlo
, t0
, 0);
8799 gen_helper_1e0i(mfthi
, t0
, 0);
8802 gen_helper_1e0i(mftacx
, t0
, 0);
8805 gen_helper_1e0i(mftlo
, t0
, 1);
8808 gen_helper_1e0i(mfthi
, t0
, 1);
8811 gen_helper_1e0i(mftacx
, t0
, 1);
8814 gen_helper_1e0i(mftlo
, t0
, 2);
8817 gen_helper_1e0i(mfthi
, t0
, 2);
8820 gen_helper_1e0i(mftacx
, t0
, 2);
8823 gen_helper_1e0i(mftlo
, t0
, 3);
8826 gen_helper_1e0i(mfthi
, t0
, 3);
8829 gen_helper_1e0i(mftacx
, t0
, 3);
8832 gen_helper_mftdsp(t0
, cpu_env
);
8838 /* Floating point (COP1). */
8840 /* XXX: For now we support only a single FPU context. */
8842 TCGv_i32 fp0
= tcg_temp_new_i32();
8844 gen_load_fpr32(ctx
, fp0
, rt
);
8845 tcg_gen_ext_i32_tl(t0
, fp0
);
8846 tcg_temp_free_i32(fp0
);
8848 TCGv_i32 fp0
= tcg_temp_new_i32();
8850 gen_load_fpr32h(ctx
, fp0
, rt
);
8851 tcg_gen_ext_i32_tl(t0
, fp0
);
8852 tcg_temp_free_i32(fp0
);
8856 /* XXX: For now we support only a single FPU context. */
8857 gen_helper_1e0i(cfc1
, t0
, rt
);
8859 /* COP2: Not implemented. */
8867 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
8868 gen_store_gpr(t0
, rd
);
8874 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
8875 gen_reserved_instruction(ctx
);
8878 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
8879 int u
, int sel
, int h
)
8881 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
8882 TCGv t0
= tcg_temp_local_new();
8884 gen_load_gpr(t0
, rt
);
8885 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
8886 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
8887 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
8890 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
8891 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
8894 } else if (u
== 0) {
8899 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
8902 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
8912 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
8915 gen_helper_mttc0_tcbind(cpu_env
, t0
);
8918 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
8921 gen_helper_mttc0_tchalt(cpu_env
, t0
);
8924 gen_helper_mttc0_tccontext(cpu_env
, t0
);
8927 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
8930 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
8933 gen_mtc0(ctx
, t0
, rd
, sel
);
8940 gen_helper_mttc0_entryhi(cpu_env
, t0
);
8943 gen_mtc0(ctx
, t0
, rd
, sel
);
8950 gen_helper_mttc0_status(cpu_env
, t0
);
8953 gen_mtc0(ctx
, t0
, rd
, sel
);
8960 gen_helper_mttc0_cause(cpu_env
, t0
);
8970 gen_helper_mttc0_ebase(cpu_env
, t0
);
8980 gen_helper_mttc0_debug(cpu_env
, t0
);
8983 gen_mtc0(ctx
, t0
, rd
, sel
);
8988 gen_mtc0(ctx
, t0
, rd
, sel
);
8992 /* GPR registers. */
8994 gen_helper_0e1i(mttgpr
, t0
, rd
);
8996 /* Auxiliary CPU registers */
9000 gen_helper_0e1i(mttlo
, t0
, 0);
9003 gen_helper_0e1i(mtthi
, t0
, 0);
9006 gen_helper_0e1i(mttacx
, t0
, 0);
9009 gen_helper_0e1i(mttlo
, t0
, 1);
9012 gen_helper_0e1i(mtthi
, t0
, 1);
9015 gen_helper_0e1i(mttacx
, t0
, 1);
9018 gen_helper_0e1i(mttlo
, t0
, 2);
9021 gen_helper_0e1i(mtthi
, t0
, 2);
9024 gen_helper_0e1i(mttacx
, t0
, 2);
9027 gen_helper_0e1i(mttlo
, t0
, 3);
9030 gen_helper_0e1i(mtthi
, t0
, 3);
9033 gen_helper_0e1i(mttacx
, t0
, 3);
9036 gen_helper_mttdsp(cpu_env
, t0
);
9042 /* Floating point (COP1). */
9044 /* XXX: For now we support only a single FPU context. */
9046 TCGv_i32 fp0
= tcg_temp_new_i32();
9048 tcg_gen_trunc_tl_i32(fp0
, t0
);
9049 gen_store_fpr32(ctx
, fp0
, rd
);
9050 tcg_temp_free_i32(fp0
);
9052 TCGv_i32 fp0
= tcg_temp_new_i32();
9054 tcg_gen_trunc_tl_i32(fp0
, t0
);
9055 gen_store_fpr32h(ctx
, fp0
, rd
);
9056 tcg_temp_free_i32(fp0
);
9060 /* XXX: For now we support only a single FPU context. */
9061 gen_helper_0e2i(ctc1
, t0
, tcg_constant_i32(rd
), rt
);
9062 /* Stop translation as we may have changed hflags */
9063 ctx
->base
.is_jmp
= DISAS_STOP
;
9065 /* COP2: Not implemented. */
9073 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
9079 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
9080 gen_reserved_instruction(ctx
);
9083 static void gen_cp0(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
9086 const char *opn
= "ldst";
9088 check_cp0_enabled(ctx
);
9095 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9100 TCGv t0
= tcg_temp_new();
9102 gen_load_gpr(t0
, rt
);
9103 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9108 #if defined(TARGET_MIPS64)
9110 check_insn(ctx
, ISA_MIPS3
);
9115 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9119 check_insn(ctx
, ISA_MIPS3
);
9121 TCGv t0
= tcg_temp_new();
9123 gen_load_gpr(t0
, rt
);
9124 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9136 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
9142 TCGv t0
= tcg_temp_new();
9143 gen_load_gpr(t0
, rt
);
9144 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
9150 check_cp0_enabled(ctx
);
9155 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
9156 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9160 check_cp0_enabled(ctx
);
9161 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
9162 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
9167 if (!env
->tlb
->helper_tlbwi
) {
9170 gen_helper_tlbwi(cpu_env
);
9175 if (!env
->tlb
->helper_tlbinv
) {
9178 gen_helper_tlbinv(cpu_env
);
9179 } /* treat as nop if TLBINV not supported */
9184 if (!env
->tlb
->helper_tlbinvf
) {
9187 gen_helper_tlbinvf(cpu_env
);
9188 } /* treat as nop if TLBINV not supported */
9192 if (!env
->tlb
->helper_tlbwr
) {
9195 gen_helper_tlbwr(cpu_env
);
9199 if (!env
->tlb
->helper_tlbp
) {
9202 gen_helper_tlbp(cpu_env
);
9206 if (!env
->tlb
->helper_tlbr
) {
9209 gen_helper_tlbr(cpu_env
);
9211 case OPC_ERET
: /* OPC_ERETNC */
9212 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9213 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9216 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
9217 if (ctx
->opcode
& (1 << bit_shift
)) {
9220 check_insn(ctx
, ISA_MIPS_R5
);
9221 gen_helper_eretnc(cpu_env
);
9225 check_insn(ctx
, ISA_MIPS2
);
9226 gen_helper_eret(cpu_env
);
9228 ctx
->base
.is_jmp
= DISAS_EXIT
;
9233 check_insn(ctx
, ISA_MIPS_R1
);
9234 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9235 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9238 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9240 gen_reserved_instruction(ctx
);
9242 gen_helper_deret(cpu_env
);
9243 ctx
->base
.is_jmp
= DISAS_EXIT
;
9248 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS_R1
);
9249 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
9250 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9253 /* If we get an exception, we want to restart at next instruction */
9254 ctx
->base
.pc_next
+= 4;
9255 save_cpu_state(ctx
, 1);
9256 ctx
->base
.pc_next
-= 4;
9257 gen_helper_wait(cpu_env
);
9258 ctx
->base
.is_jmp
= DISAS_NORETURN
;
9263 gen_reserved_instruction(ctx
);
9266 (void)opn
; /* avoid a compiler warning */
9268 #endif /* !CONFIG_USER_ONLY */
9270 /* CP1 Branches (before delay slot) */
9271 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
9272 int32_t cc
, int32_t offset
)
9274 target_ulong btarget
;
9275 TCGv_i32 t0
= tcg_temp_new_i32();
9277 if ((ctx
->insn_flags
& ISA_MIPS_R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
9278 gen_reserved_instruction(ctx
);
9283 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
9286 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
9290 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9291 tcg_gen_not_i32(t0
, t0
);
9292 tcg_gen_andi_i32(t0
, t0
, 1);
9293 tcg_gen_extu_i32_tl(bcond
, t0
);
9296 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9297 tcg_gen_not_i32(t0
, t0
);
9298 tcg_gen_andi_i32(t0
, t0
, 1);
9299 tcg_gen_extu_i32_tl(bcond
, t0
);
9302 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9303 tcg_gen_andi_i32(t0
, t0
, 1);
9304 tcg_gen_extu_i32_tl(bcond
, t0
);
9307 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9308 tcg_gen_andi_i32(t0
, t0
, 1);
9309 tcg_gen_extu_i32_tl(bcond
, t0
);
9311 ctx
->hflags
|= MIPS_HFLAG_BL
;
9315 TCGv_i32 t1
= tcg_temp_new_i32();
9316 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9317 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9318 tcg_gen_nand_i32(t0
, t0
, t1
);
9319 tcg_temp_free_i32(t1
);
9320 tcg_gen_andi_i32(t0
, t0
, 1);
9321 tcg_gen_extu_i32_tl(bcond
, t0
);
9326 TCGv_i32 t1
= tcg_temp_new_i32();
9327 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9328 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9329 tcg_gen_or_i32(t0
, t0
, t1
);
9330 tcg_temp_free_i32(t1
);
9331 tcg_gen_andi_i32(t0
, t0
, 1);
9332 tcg_gen_extu_i32_tl(bcond
, t0
);
9337 TCGv_i32 t1
= tcg_temp_new_i32();
9338 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9339 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9340 tcg_gen_and_i32(t0
, t0
, t1
);
9341 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
9342 tcg_gen_and_i32(t0
, t0
, t1
);
9343 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
9344 tcg_gen_nand_i32(t0
, t0
, t1
);
9345 tcg_temp_free_i32(t1
);
9346 tcg_gen_andi_i32(t0
, t0
, 1);
9347 tcg_gen_extu_i32_tl(bcond
, t0
);
9352 TCGv_i32 t1
= tcg_temp_new_i32();
9353 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
9354 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
9355 tcg_gen_or_i32(t0
, t0
, t1
);
9356 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
9357 tcg_gen_or_i32(t0
, t0
, t1
);
9358 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
9359 tcg_gen_or_i32(t0
, t0
, t1
);
9360 tcg_temp_free_i32(t1
);
9361 tcg_gen_andi_i32(t0
, t0
, 1);
9362 tcg_gen_extu_i32_tl(bcond
, t0
);
9365 ctx
->hflags
|= MIPS_HFLAG_BC
;
9368 MIPS_INVAL("cp1 cond branch");
9369 gen_reserved_instruction(ctx
);
9372 ctx
->btarget
= btarget
;
9373 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
9375 tcg_temp_free_i32(t0
);
9378 /* R6 CP1 Branches */
9379 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
9380 int32_t ft
, int32_t offset
,
9383 target_ulong btarget
;
9384 TCGv_i64 t0
= tcg_temp_new_i64();
9386 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
9387 #ifdef MIPS_DEBUG_DISAS
9388 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
9389 "\n", ctx
->base
.pc_next
);
9391 gen_reserved_instruction(ctx
);
9395 gen_load_fpr64(ctx
, t0
, ft
);
9396 tcg_gen_andi_i64(t0
, t0
, 1);
9398 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
9402 tcg_gen_xori_i64(t0
, t0
, 1);
9403 ctx
->hflags
|= MIPS_HFLAG_BC
;
9406 /* t0 already set */
9407 ctx
->hflags
|= MIPS_HFLAG_BC
;
9410 MIPS_INVAL("cp1 cond branch");
9411 gen_reserved_instruction(ctx
);
9415 tcg_gen_trunc_i64_tl(bcond
, t0
);
9417 ctx
->btarget
= btarget
;
9419 switch (delayslot_size
) {
9421 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
9424 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
9429 tcg_temp_free_i64(t0
);
9432 /* Coprocessor 1 (FPU) */
9434 #define FOP(func, fmt) (((fmt) << 21) | (func))
9437 OPC_ADD_S
= FOP(0, FMT_S
),
9438 OPC_SUB_S
= FOP(1, FMT_S
),
9439 OPC_MUL_S
= FOP(2, FMT_S
),
9440 OPC_DIV_S
= FOP(3, FMT_S
),
9441 OPC_SQRT_S
= FOP(4, FMT_S
),
9442 OPC_ABS_S
= FOP(5, FMT_S
),
9443 OPC_MOV_S
= FOP(6, FMT_S
),
9444 OPC_NEG_S
= FOP(7, FMT_S
),
9445 OPC_ROUND_L_S
= FOP(8, FMT_S
),
9446 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
9447 OPC_CEIL_L_S
= FOP(10, FMT_S
),
9448 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
9449 OPC_ROUND_W_S
= FOP(12, FMT_S
),
9450 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
9451 OPC_CEIL_W_S
= FOP(14, FMT_S
),
9452 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
9453 OPC_SEL_S
= FOP(16, FMT_S
),
9454 OPC_MOVCF_S
= FOP(17, FMT_S
),
9455 OPC_MOVZ_S
= FOP(18, FMT_S
),
9456 OPC_MOVN_S
= FOP(19, FMT_S
),
9457 OPC_SELEQZ_S
= FOP(20, FMT_S
),
9458 OPC_RECIP_S
= FOP(21, FMT_S
),
9459 OPC_RSQRT_S
= FOP(22, FMT_S
),
9460 OPC_SELNEZ_S
= FOP(23, FMT_S
),
9461 OPC_MADDF_S
= FOP(24, FMT_S
),
9462 OPC_MSUBF_S
= FOP(25, FMT_S
),
9463 OPC_RINT_S
= FOP(26, FMT_S
),
9464 OPC_CLASS_S
= FOP(27, FMT_S
),
9465 OPC_MIN_S
= FOP(28, FMT_S
),
9466 OPC_RECIP2_S
= FOP(28, FMT_S
),
9467 OPC_MINA_S
= FOP(29, FMT_S
),
9468 OPC_RECIP1_S
= FOP(29, FMT_S
),
9469 OPC_MAX_S
= FOP(30, FMT_S
),
9470 OPC_RSQRT1_S
= FOP(30, FMT_S
),
9471 OPC_MAXA_S
= FOP(31, FMT_S
),
9472 OPC_RSQRT2_S
= FOP(31, FMT_S
),
9473 OPC_CVT_D_S
= FOP(33, FMT_S
),
9474 OPC_CVT_W_S
= FOP(36, FMT_S
),
9475 OPC_CVT_L_S
= FOP(37, FMT_S
),
9476 OPC_CVT_PS_S
= FOP(38, FMT_S
),
9477 OPC_CMP_F_S
= FOP(48, FMT_S
),
9478 OPC_CMP_UN_S
= FOP(49, FMT_S
),
9479 OPC_CMP_EQ_S
= FOP(50, FMT_S
),
9480 OPC_CMP_UEQ_S
= FOP(51, FMT_S
),
9481 OPC_CMP_OLT_S
= FOP(52, FMT_S
),
9482 OPC_CMP_ULT_S
= FOP(53, FMT_S
),
9483 OPC_CMP_OLE_S
= FOP(54, FMT_S
),
9484 OPC_CMP_ULE_S
= FOP(55, FMT_S
),
9485 OPC_CMP_SF_S
= FOP(56, FMT_S
),
9486 OPC_CMP_NGLE_S
= FOP(57, FMT_S
),
9487 OPC_CMP_SEQ_S
= FOP(58, FMT_S
),
9488 OPC_CMP_NGL_S
= FOP(59, FMT_S
),
9489 OPC_CMP_LT_S
= FOP(60, FMT_S
),
9490 OPC_CMP_NGE_S
= FOP(61, FMT_S
),
9491 OPC_CMP_LE_S
= FOP(62, FMT_S
),
9492 OPC_CMP_NGT_S
= FOP(63, FMT_S
),
9494 OPC_ADD_D
= FOP(0, FMT_D
),
9495 OPC_SUB_D
= FOP(1, FMT_D
),
9496 OPC_MUL_D
= FOP(2, FMT_D
),
9497 OPC_DIV_D
= FOP(3, FMT_D
),
9498 OPC_SQRT_D
= FOP(4, FMT_D
),
9499 OPC_ABS_D
= FOP(5, FMT_D
),
9500 OPC_MOV_D
= FOP(6, FMT_D
),
9501 OPC_NEG_D
= FOP(7, FMT_D
),
9502 OPC_ROUND_L_D
= FOP(8, FMT_D
),
9503 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
9504 OPC_CEIL_L_D
= FOP(10, FMT_D
),
9505 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
9506 OPC_ROUND_W_D
= FOP(12, FMT_D
),
9507 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
9508 OPC_CEIL_W_D
= FOP(14, FMT_D
),
9509 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
9510 OPC_SEL_D
= FOP(16, FMT_D
),
9511 OPC_MOVCF_D
= FOP(17, FMT_D
),
9512 OPC_MOVZ_D
= FOP(18, FMT_D
),
9513 OPC_MOVN_D
= FOP(19, FMT_D
),
9514 OPC_SELEQZ_D
= FOP(20, FMT_D
),
9515 OPC_RECIP_D
= FOP(21, FMT_D
),
9516 OPC_RSQRT_D
= FOP(22, FMT_D
),
9517 OPC_SELNEZ_D
= FOP(23, FMT_D
),
9518 OPC_MADDF_D
= FOP(24, FMT_D
),
9519 OPC_MSUBF_D
= FOP(25, FMT_D
),
9520 OPC_RINT_D
= FOP(26, FMT_D
),
9521 OPC_CLASS_D
= FOP(27, FMT_D
),
9522 OPC_MIN_D
= FOP(28, FMT_D
),
9523 OPC_RECIP2_D
= FOP(28, FMT_D
),
9524 OPC_MINA_D
= FOP(29, FMT_D
),
9525 OPC_RECIP1_D
= FOP(29, FMT_D
),
9526 OPC_MAX_D
= FOP(30, FMT_D
),
9527 OPC_RSQRT1_D
= FOP(30, FMT_D
),
9528 OPC_MAXA_D
= FOP(31, FMT_D
),
9529 OPC_RSQRT2_D
= FOP(31, FMT_D
),
9530 OPC_CVT_S_D
= FOP(32, FMT_D
),
9531 OPC_CVT_W_D
= FOP(36, FMT_D
),
9532 OPC_CVT_L_D
= FOP(37, FMT_D
),
9533 OPC_CMP_F_D
= FOP(48, FMT_D
),
9534 OPC_CMP_UN_D
= FOP(49, FMT_D
),
9535 OPC_CMP_EQ_D
= FOP(50, FMT_D
),
9536 OPC_CMP_UEQ_D
= FOP(51, FMT_D
),
9537 OPC_CMP_OLT_D
= FOP(52, FMT_D
),
9538 OPC_CMP_ULT_D
= FOP(53, FMT_D
),
9539 OPC_CMP_OLE_D
= FOP(54, FMT_D
),
9540 OPC_CMP_ULE_D
= FOP(55, FMT_D
),
9541 OPC_CMP_SF_D
= FOP(56, FMT_D
),
9542 OPC_CMP_NGLE_D
= FOP(57, FMT_D
),
9543 OPC_CMP_SEQ_D
= FOP(58, FMT_D
),
9544 OPC_CMP_NGL_D
= FOP(59, FMT_D
),
9545 OPC_CMP_LT_D
= FOP(60, FMT_D
),
9546 OPC_CMP_NGE_D
= FOP(61, FMT_D
),
9547 OPC_CMP_LE_D
= FOP(62, FMT_D
),
9548 OPC_CMP_NGT_D
= FOP(63, FMT_D
),
9550 OPC_CVT_S_W
= FOP(32, FMT_W
),
9551 OPC_CVT_D_W
= FOP(33, FMT_W
),
9552 OPC_CVT_S_L
= FOP(32, FMT_L
),
9553 OPC_CVT_D_L
= FOP(33, FMT_L
),
9554 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
9556 OPC_ADD_PS
= FOP(0, FMT_PS
),
9557 OPC_SUB_PS
= FOP(1, FMT_PS
),
9558 OPC_MUL_PS
= FOP(2, FMT_PS
),
9559 OPC_DIV_PS
= FOP(3, FMT_PS
),
9560 OPC_ABS_PS
= FOP(5, FMT_PS
),
9561 OPC_MOV_PS
= FOP(6, FMT_PS
),
9562 OPC_NEG_PS
= FOP(7, FMT_PS
),
9563 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
9564 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
9565 OPC_MOVN_PS
= FOP(19, FMT_PS
),
9566 OPC_ADDR_PS
= FOP(24, FMT_PS
),
9567 OPC_MULR_PS
= FOP(26, FMT_PS
),
9568 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
9569 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
9570 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
9571 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
9573 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
9574 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
9575 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
9576 OPC_PLL_PS
= FOP(44, FMT_PS
),
9577 OPC_PLU_PS
= FOP(45, FMT_PS
),
9578 OPC_PUL_PS
= FOP(46, FMT_PS
),
9579 OPC_PUU_PS
= FOP(47, FMT_PS
),
9580 OPC_CMP_F_PS
= FOP(48, FMT_PS
),
9581 OPC_CMP_UN_PS
= FOP(49, FMT_PS
),
9582 OPC_CMP_EQ_PS
= FOP(50, FMT_PS
),
9583 OPC_CMP_UEQ_PS
= FOP(51, FMT_PS
),
9584 OPC_CMP_OLT_PS
= FOP(52, FMT_PS
),
9585 OPC_CMP_ULT_PS
= FOP(53, FMT_PS
),
9586 OPC_CMP_OLE_PS
= FOP(54, FMT_PS
),
9587 OPC_CMP_ULE_PS
= FOP(55, FMT_PS
),
9588 OPC_CMP_SF_PS
= FOP(56, FMT_PS
),
9589 OPC_CMP_NGLE_PS
= FOP(57, FMT_PS
),
9590 OPC_CMP_SEQ_PS
= FOP(58, FMT_PS
),
9591 OPC_CMP_NGL_PS
= FOP(59, FMT_PS
),
9592 OPC_CMP_LT_PS
= FOP(60, FMT_PS
),
9593 OPC_CMP_NGE_PS
= FOP(61, FMT_PS
),
9594 OPC_CMP_LE_PS
= FOP(62, FMT_PS
),
9595 OPC_CMP_NGT_PS
= FOP(63, FMT_PS
),
9599 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
9600 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
9601 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
9602 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
9603 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
9604 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
9605 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
9606 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
9607 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
9608 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
9609 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
9610 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
9611 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
9612 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
9613 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
9614 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
9615 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
9616 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
9617 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
9618 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
9619 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
9620 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
9622 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
9623 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
9624 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
9625 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
9626 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
9627 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
9628 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
9629 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
9630 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
9631 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
9632 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
9633 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
9634 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
9635 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
9636 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
9637 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
9638 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
9639 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
9640 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
9641 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
9642 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
9643 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
9646 static void gen_cp1(DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
9648 TCGv t0
= tcg_temp_new();
9653 TCGv_i32 fp0
= tcg_temp_new_i32();
9655 gen_load_fpr32(ctx
, fp0
, fs
);
9656 tcg_gen_ext_i32_tl(t0
, fp0
);
9657 tcg_temp_free_i32(fp0
);
9659 gen_store_gpr(t0
, rt
);
9662 gen_load_gpr(t0
, rt
);
9664 TCGv_i32 fp0
= tcg_temp_new_i32();
9666 tcg_gen_trunc_tl_i32(fp0
, t0
);
9667 gen_store_fpr32(ctx
, fp0
, fs
);
9668 tcg_temp_free_i32(fp0
);
9672 gen_helper_1e0i(cfc1
, t0
, fs
);
9673 gen_store_gpr(t0
, rt
);
9676 gen_load_gpr(t0
, rt
);
9677 save_cpu_state(ctx
, 0);
9678 gen_helper_0e2i(ctc1
, t0
, tcg_constant_i32(fs
), rt
);
9679 /* Stop translation as we may have changed hflags */
9680 ctx
->base
.is_jmp
= DISAS_STOP
;
9682 #if defined(TARGET_MIPS64)
9684 gen_load_fpr64(ctx
, t0
, fs
);
9685 gen_store_gpr(t0
, rt
);
9688 gen_load_gpr(t0
, rt
);
9689 gen_store_fpr64(ctx
, t0
, fs
);
9694 TCGv_i32 fp0
= tcg_temp_new_i32();
9696 gen_load_fpr32h(ctx
, fp0
, fs
);
9697 tcg_gen_ext_i32_tl(t0
, fp0
);
9698 tcg_temp_free_i32(fp0
);
9700 gen_store_gpr(t0
, rt
);
9703 gen_load_gpr(t0
, rt
);
9705 TCGv_i32 fp0
= tcg_temp_new_i32();
9707 tcg_gen_trunc_tl_i32(fp0
, t0
);
9708 gen_store_fpr32h(ctx
, fp0
, fs
);
9709 tcg_temp_free_i32(fp0
);
9713 MIPS_INVAL("cp1 move");
9714 gen_reserved_instruction(ctx
);
9722 static void gen_movci(DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
9739 l1
= gen_new_label();
9740 t0
= tcg_temp_new_i32();
9741 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9742 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9743 tcg_temp_free_i32(t0
);
9744 gen_load_gpr(cpu_gpr
[rd
], rs
);
9748 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
9752 TCGv_i32 t0
= tcg_temp_new_i32();
9753 TCGLabel
*l1
= gen_new_label();
9761 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9762 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9763 gen_load_fpr32(ctx
, t0
, fs
);
9764 gen_store_fpr32(ctx
, t0
, fd
);
9766 tcg_temp_free_i32(t0
);
9769 static inline void gen_movcf_d(DisasContext
*ctx
, int fs
, int fd
, int cc
,
9773 TCGv_i32 t0
= tcg_temp_new_i32();
9775 TCGLabel
*l1
= gen_new_label();
9783 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9784 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9785 tcg_temp_free_i32(t0
);
9786 fp0
= tcg_temp_new_i64();
9787 gen_load_fpr64(ctx
, fp0
, fs
);
9788 gen_store_fpr64(ctx
, fp0
, fd
);
9789 tcg_temp_free_i64(fp0
);
9793 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
9797 TCGv_i32 t0
= tcg_temp_new_i32();
9798 TCGLabel
*l1
= gen_new_label();
9799 TCGLabel
*l2
= gen_new_label();
9807 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
9808 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
9809 gen_load_fpr32(ctx
, t0
, fs
);
9810 gen_store_fpr32(ctx
, t0
, fd
);
9813 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+ 1));
9814 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
9815 gen_load_fpr32h(ctx
, t0
, fs
);
9816 gen_store_fpr32h(ctx
, t0
, fd
);
9817 tcg_temp_free_i32(t0
);
9821 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
9824 TCGv_i32 t1
= tcg_const_i32(0);
9825 TCGv_i32 fp0
= tcg_temp_new_i32();
9826 TCGv_i32 fp1
= tcg_temp_new_i32();
9827 TCGv_i32 fp2
= tcg_temp_new_i32();
9828 gen_load_fpr32(ctx
, fp0
, fd
);
9829 gen_load_fpr32(ctx
, fp1
, ft
);
9830 gen_load_fpr32(ctx
, fp2
, fs
);
9834 tcg_gen_andi_i32(fp0
, fp0
, 1);
9835 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
9838 tcg_gen_andi_i32(fp1
, fp1
, 1);
9839 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
9842 tcg_gen_andi_i32(fp1
, fp1
, 1);
9843 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
9846 MIPS_INVAL("gen_sel_s");
9847 gen_reserved_instruction(ctx
);
9851 gen_store_fpr32(ctx
, fp0
, fd
);
9852 tcg_temp_free_i32(fp2
);
9853 tcg_temp_free_i32(fp1
);
9854 tcg_temp_free_i32(fp0
);
9855 tcg_temp_free_i32(t1
);
9858 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
9861 TCGv_i64 t1
= tcg_const_i64(0);
9862 TCGv_i64 fp0
= tcg_temp_new_i64();
9863 TCGv_i64 fp1
= tcg_temp_new_i64();
9864 TCGv_i64 fp2
= tcg_temp_new_i64();
9865 gen_load_fpr64(ctx
, fp0
, fd
);
9866 gen_load_fpr64(ctx
, fp1
, ft
);
9867 gen_load_fpr64(ctx
, fp2
, fs
);
9871 tcg_gen_andi_i64(fp0
, fp0
, 1);
9872 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
9875 tcg_gen_andi_i64(fp1
, fp1
, 1);
9876 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
9879 tcg_gen_andi_i64(fp1
, fp1
, 1);
9880 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
9883 MIPS_INVAL("gen_sel_d");
9884 gen_reserved_instruction(ctx
);
9888 gen_store_fpr64(ctx
, fp0
, fd
);
9889 tcg_temp_free_i64(fp2
);
9890 tcg_temp_free_i64(fp1
);
9891 tcg_temp_free_i64(fp0
);
9892 tcg_temp_free_i64(t1
);
9895 static void gen_farith(DisasContext
*ctx
, enum fopcode op1
,
9896 int ft
, int fs
, int fd
, int cc
)
9898 uint32_t func
= ctx
->opcode
& 0x3f;
9902 TCGv_i32 fp0
= tcg_temp_new_i32();
9903 TCGv_i32 fp1
= tcg_temp_new_i32();
9905 gen_load_fpr32(ctx
, fp0
, fs
);
9906 gen_load_fpr32(ctx
, fp1
, ft
);
9907 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
9908 tcg_temp_free_i32(fp1
);
9909 gen_store_fpr32(ctx
, fp0
, fd
);
9910 tcg_temp_free_i32(fp0
);
9915 TCGv_i32 fp0
= tcg_temp_new_i32();
9916 TCGv_i32 fp1
= tcg_temp_new_i32();
9918 gen_load_fpr32(ctx
, fp0
, fs
);
9919 gen_load_fpr32(ctx
, fp1
, ft
);
9920 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
9921 tcg_temp_free_i32(fp1
);
9922 gen_store_fpr32(ctx
, fp0
, fd
);
9923 tcg_temp_free_i32(fp0
);
9928 TCGv_i32 fp0
= tcg_temp_new_i32();
9929 TCGv_i32 fp1
= tcg_temp_new_i32();
9931 gen_load_fpr32(ctx
, fp0
, fs
);
9932 gen_load_fpr32(ctx
, fp1
, ft
);
9933 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
9934 tcg_temp_free_i32(fp1
);
9935 gen_store_fpr32(ctx
, fp0
, fd
);
9936 tcg_temp_free_i32(fp0
);
9941 TCGv_i32 fp0
= tcg_temp_new_i32();
9942 TCGv_i32 fp1
= tcg_temp_new_i32();
9944 gen_load_fpr32(ctx
, fp0
, fs
);
9945 gen_load_fpr32(ctx
, fp1
, ft
);
9946 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
9947 tcg_temp_free_i32(fp1
);
9948 gen_store_fpr32(ctx
, fp0
, fd
);
9949 tcg_temp_free_i32(fp0
);
9954 TCGv_i32 fp0
= tcg_temp_new_i32();
9956 gen_load_fpr32(ctx
, fp0
, fs
);
9957 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
9958 gen_store_fpr32(ctx
, fp0
, fd
);
9959 tcg_temp_free_i32(fp0
);
9964 TCGv_i32 fp0
= tcg_temp_new_i32();
9966 gen_load_fpr32(ctx
, fp0
, fs
);
9968 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
9970 gen_helper_float_abs_s(fp0
, fp0
);
9972 gen_store_fpr32(ctx
, fp0
, fd
);
9973 tcg_temp_free_i32(fp0
);
9978 TCGv_i32 fp0
= tcg_temp_new_i32();
9980 gen_load_fpr32(ctx
, fp0
, fs
);
9981 gen_store_fpr32(ctx
, fp0
, fd
);
9982 tcg_temp_free_i32(fp0
);
9987 TCGv_i32 fp0
= tcg_temp_new_i32();
9989 gen_load_fpr32(ctx
, fp0
, fs
);
9991 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
9993 gen_helper_float_chs_s(fp0
, fp0
);
9995 gen_store_fpr32(ctx
, fp0
, fd
);
9996 tcg_temp_free_i32(fp0
);
10000 check_cp1_64bitmode(ctx
);
10002 TCGv_i32 fp32
= tcg_temp_new_i32();
10003 TCGv_i64 fp64
= tcg_temp_new_i64();
10005 gen_load_fpr32(ctx
, fp32
, fs
);
10006 if (ctx
->nan2008
) {
10007 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
10009 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
10011 tcg_temp_free_i32(fp32
);
10012 gen_store_fpr64(ctx
, fp64
, fd
);
10013 tcg_temp_free_i64(fp64
);
10016 case OPC_TRUNC_L_S
:
10017 check_cp1_64bitmode(ctx
);
10019 TCGv_i32 fp32
= tcg_temp_new_i32();
10020 TCGv_i64 fp64
= tcg_temp_new_i64();
10022 gen_load_fpr32(ctx
, fp32
, fs
);
10023 if (ctx
->nan2008
) {
10024 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
10026 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
10028 tcg_temp_free_i32(fp32
);
10029 gen_store_fpr64(ctx
, fp64
, fd
);
10030 tcg_temp_free_i64(fp64
);
10034 check_cp1_64bitmode(ctx
);
10036 TCGv_i32 fp32
= tcg_temp_new_i32();
10037 TCGv_i64 fp64
= tcg_temp_new_i64();
10039 gen_load_fpr32(ctx
, fp32
, fs
);
10040 if (ctx
->nan2008
) {
10041 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
10043 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
10045 tcg_temp_free_i32(fp32
);
10046 gen_store_fpr64(ctx
, fp64
, fd
);
10047 tcg_temp_free_i64(fp64
);
10050 case OPC_FLOOR_L_S
:
10051 check_cp1_64bitmode(ctx
);
10053 TCGv_i32 fp32
= tcg_temp_new_i32();
10054 TCGv_i64 fp64
= tcg_temp_new_i64();
10056 gen_load_fpr32(ctx
, fp32
, fs
);
10057 if (ctx
->nan2008
) {
10058 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
10060 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
10062 tcg_temp_free_i32(fp32
);
10063 gen_store_fpr64(ctx
, fp64
, fd
);
10064 tcg_temp_free_i64(fp64
);
10067 case OPC_ROUND_W_S
:
10069 TCGv_i32 fp0
= tcg_temp_new_i32();
10071 gen_load_fpr32(ctx
, fp0
, fs
);
10072 if (ctx
->nan2008
) {
10073 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
10075 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
10077 gen_store_fpr32(ctx
, fp0
, fd
);
10078 tcg_temp_free_i32(fp0
);
10081 case OPC_TRUNC_W_S
:
10083 TCGv_i32 fp0
= tcg_temp_new_i32();
10085 gen_load_fpr32(ctx
, fp0
, fs
);
10086 if (ctx
->nan2008
) {
10087 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
10089 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
10091 gen_store_fpr32(ctx
, fp0
, fd
);
10092 tcg_temp_free_i32(fp0
);
10097 TCGv_i32 fp0
= tcg_temp_new_i32();
10099 gen_load_fpr32(ctx
, fp0
, fs
);
10100 if (ctx
->nan2008
) {
10101 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
10103 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
10105 gen_store_fpr32(ctx
, fp0
, fd
);
10106 tcg_temp_free_i32(fp0
);
10109 case OPC_FLOOR_W_S
:
10111 TCGv_i32 fp0
= tcg_temp_new_i32();
10113 gen_load_fpr32(ctx
, fp0
, fs
);
10114 if (ctx
->nan2008
) {
10115 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
10117 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
10119 gen_store_fpr32(ctx
, fp0
, fd
);
10120 tcg_temp_free_i32(fp0
);
10124 check_insn(ctx
, ISA_MIPS_R6
);
10125 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10128 check_insn(ctx
, ISA_MIPS_R6
);
10129 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10132 check_insn(ctx
, ISA_MIPS_R6
);
10133 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
10136 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10137 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10140 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10142 TCGLabel
*l1
= gen_new_label();
10146 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10148 fp0
= tcg_temp_new_i32();
10149 gen_load_fpr32(ctx
, fp0
, fs
);
10150 gen_store_fpr32(ctx
, fp0
, fd
);
10151 tcg_temp_free_i32(fp0
);
10156 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10158 TCGLabel
*l1
= gen_new_label();
10162 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10163 fp0
= tcg_temp_new_i32();
10164 gen_load_fpr32(ctx
, fp0
, fs
);
10165 gen_store_fpr32(ctx
, fp0
, fd
);
10166 tcg_temp_free_i32(fp0
);
10173 TCGv_i32 fp0
= tcg_temp_new_i32();
10175 gen_load_fpr32(ctx
, fp0
, fs
);
10176 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
10177 gen_store_fpr32(ctx
, fp0
, fd
);
10178 tcg_temp_free_i32(fp0
);
10183 TCGv_i32 fp0
= tcg_temp_new_i32();
10185 gen_load_fpr32(ctx
, fp0
, fs
);
10186 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
10187 gen_store_fpr32(ctx
, fp0
, fd
);
10188 tcg_temp_free_i32(fp0
);
10192 check_insn(ctx
, ISA_MIPS_R6
);
10194 TCGv_i32 fp0
= tcg_temp_new_i32();
10195 TCGv_i32 fp1
= tcg_temp_new_i32();
10196 TCGv_i32 fp2
= tcg_temp_new_i32();
10197 gen_load_fpr32(ctx
, fp0
, fs
);
10198 gen_load_fpr32(ctx
, fp1
, ft
);
10199 gen_load_fpr32(ctx
, fp2
, fd
);
10200 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10201 gen_store_fpr32(ctx
, fp2
, fd
);
10202 tcg_temp_free_i32(fp2
);
10203 tcg_temp_free_i32(fp1
);
10204 tcg_temp_free_i32(fp0
);
10208 check_insn(ctx
, ISA_MIPS_R6
);
10210 TCGv_i32 fp0
= tcg_temp_new_i32();
10211 TCGv_i32 fp1
= tcg_temp_new_i32();
10212 TCGv_i32 fp2
= tcg_temp_new_i32();
10213 gen_load_fpr32(ctx
, fp0
, fs
);
10214 gen_load_fpr32(ctx
, fp1
, ft
);
10215 gen_load_fpr32(ctx
, fp2
, fd
);
10216 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10217 gen_store_fpr32(ctx
, fp2
, fd
);
10218 tcg_temp_free_i32(fp2
);
10219 tcg_temp_free_i32(fp1
);
10220 tcg_temp_free_i32(fp0
);
10224 check_insn(ctx
, ISA_MIPS_R6
);
10226 TCGv_i32 fp0
= tcg_temp_new_i32();
10227 gen_load_fpr32(ctx
, fp0
, fs
);
10228 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
10229 gen_store_fpr32(ctx
, fp0
, fd
);
10230 tcg_temp_free_i32(fp0
);
10234 check_insn(ctx
, ISA_MIPS_R6
);
10236 TCGv_i32 fp0
= tcg_temp_new_i32();
10237 gen_load_fpr32(ctx
, fp0
, fs
);
10238 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
10239 gen_store_fpr32(ctx
, fp0
, fd
);
10240 tcg_temp_free_i32(fp0
);
10243 case OPC_MIN_S
: /* OPC_RECIP2_S */
10244 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10246 TCGv_i32 fp0
= tcg_temp_new_i32();
10247 TCGv_i32 fp1
= tcg_temp_new_i32();
10248 TCGv_i32 fp2
= tcg_temp_new_i32();
10249 gen_load_fpr32(ctx
, fp0
, fs
);
10250 gen_load_fpr32(ctx
, fp1
, ft
);
10251 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
10252 gen_store_fpr32(ctx
, fp2
, fd
);
10253 tcg_temp_free_i32(fp2
);
10254 tcg_temp_free_i32(fp1
);
10255 tcg_temp_free_i32(fp0
);
10258 check_cp1_64bitmode(ctx
);
10260 TCGv_i32 fp0
= tcg_temp_new_i32();
10261 TCGv_i32 fp1
= tcg_temp_new_i32();
10263 gen_load_fpr32(ctx
, fp0
, fs
);
10264 gen_load_fpr32(ctx
, fp1
, ft
);
10265 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
10266 tcg_temp_free_i32(fp1
);
10267 gen_store_fpr32(ctx
, fp0
, fd
);
10268 tcg_temp_free_i32(fp0
);
10272 case OPC_MINA_S
: /* OPC_RECIP1_S */
10273 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10275 TCGv_i32 fp0
= tcg_temp_new_i32();
10276 TCGv_i32 fp1
= tcg_temp_new_i32();
10277 TCGv_i32 fp2
= tcg_temp_new_i32();
10278 gen_load_fpr32(ctx
, fp0
, fs
);
10279 gen_load_fpr32(ctx
, fp1
, ft
);
10280 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
10281 gen_store_fpr32(ctx
, fp2
, fd
);
10282 tcg_temp_free_i32(fp2
);
10283 tcg_temp_free_i32(fp1
);
10284 tcg_temp_free_i32(fp0
);
10287 check_cp1_64bitmode(ctx
);
10289 TCGv_i32 fp0
= tcg_temp_new_i32();
10291 gen_load_fpr32(ctx
, fp0
, fs
);
10292 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
10293 gen_store_fpr32(ctx
, fp0
, fd
);
10294 tcg_temp_free_i32(fp0
);
10298 case OPC_MAX_S
: /* OPC_RSQRT1_S */
10299 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10301 TCGv_i32 fp0
= tcg_temp_new_i32();
10302 TCGv_i32 fp1
= tcg_temp_new_i32();
10303 gen_load_fpr32(ctx
, fp0
, fs
);
10304 gen_load_fpr32(ctx
, fp1
, ft
);
10305 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
10306 gen_store_fpr32(ctx
, fp1
, fd
);
10307 tcg_temp_free_i32(fp1
);
10308 tcg_temp_free_i32(fp0
);
10311 check_cp1_64bitmode(ctx
);
10313 TCGv_i32 fp0
= tcg_temp_new_i32();
10315 gen_load_fpr32(ctx
, fp0
, fs
);
10316 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
10317 gen_store_fpr32(ctx
, fp0
, fd
);
10318 tcg_temp_free_i32(fp0
);
10322 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
10323 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10325 TCGv_i32 fp0
= tcg_temp_new_i32();
10326 TCGv_i32 fp1
= tcg_temp_new_i32();
10327 gen_load_fpr32(ctx
, fp0
, fs
);
10328 gen_load_fpr32(ctx
, fp1
, ft
);
10329 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
10330 gen_store_fpr32(ctx
, fp1
, fd
);
10331 tcg_temp_free_i32(fp1
);
10332 tcg_temp_free_i32(fp0
);
10335 check_cp1_64bitmode(ctx
);
10337 TCGv_i32 fp0
= tcg_temp_new_i32();
10338 TCGv_i32 fp1
= tcg_temp_new_i32();
10340 gen_load_fpr32(ctx
, fp0
, fs
);
10341 gen_load_fpr32(ctx
, fp1
, ft
);
10342 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
10343 tcg_temp_free_i32(fp1
);
10344 gen_store_fpr32(ctx
, fp0
, fd
);
10345 tcg_temp_free_i32(fp0
);
10350 check_cp1_registers(ctx
, fd
);
10352 TCGv_i32 fp32
= tcg_temp_new_i32();
10353 TCGv_i64 fp64
= tcg_temp_new_i64();
10355 gen_load_fpr32(ctx
, fp32
, fs
);
10356 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
10357 tcg_temp_free_i32(fp32
);
10358 gen_store_fpr64(ctx
, fp64
, fd
);
10359 tcg_temp_free_i64(fp64
);
10364 TCGv_i32 fp0
= tcg_temp_new_i32();
10366 gen_load_fpr32(ctx
, fp0
, fs
);
10367 if (ctx
->nan2008
) {
10368 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
10370 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
10372 gen_store_fpr32(ctx
, fp0
, fd
);
10373 tcg_temp_free_i32(fp0
);
10377 check_cp1_64bitmode(ctx
);
10379 TCGv_i32 fp32
= tcg_temp_new_i32();
10380 TCGv_i64 fp64
= tcg_temp_new_i64();
10382 gen_load_fpr32(ctx
, fp32
, fs
);
10383 if (ctx
->nan2008
) {
10384 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
10386 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
10388 tcg_temp_free_i32(fp32
);
10389 gen_store_fpr64(ctx
, fp64
, fd
);
10390 tcg_temp_free_i64(fp64
);
10396 TCGv_i64 fp64
= tcg_temp_new_i64();
10397 TCGv_i32 fp32_0
= tcg_temp_new_i32();
10398 TCGv_i32 fp32_1
= tcg_temp_new_i32();
10400 gen_load_fpr32(ctx
, fp32_0
, fs
);
10401 gen_load_fpr32(ctx
, fp32_1
, ft
);
10402 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
10403 tcg_temp_free_i32(fp32_1
);
10404 tcg_temp_free_i32(fp32_0
);
10405 gen_store_fpr64(ctx
, fp64
, fd
);
10406 tcg_temp_free_i64(fp64
);
10412 case OPC_CMP_UEQ_S
:
10413 case OPC_CMP_OLT_S
:
10414 case OPC_CMP_ULT_S
:
10415 case OPC_CMP_OLE_S
:
10416 case OPC_CMP_ULE_S
:
10418 case OPC_CMP_NGLE_S
:
10419 case OPC_CMP_SEQ_S
:
10420 case OPC_CMP_NGL_S
:
10422 case OPC_CMP_NGE_S
:
10424 case OPC_CMP_NGT_S
:
10425 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10426 if (ctx
->opcode
& (1 << 6)) {
10427 gen_cmpabs_s(ctx
, func
- 48, ft
, fs
, cc
);
10429 gen_cmp_s(ctx
, func
- 48, ft
, fs
, cc
);
10433 check_cp1_registers(ctx
, fs
| ft
| fd
);
10435 TCGv_i64 fp0
= tcg_temp_new_i64();
10436 TCGv_i64 fp1
= tcg_temp_new_i64();
10438 gen_load_fpr64(ctx
, fp0
, fs
);
10439 gen_load_fpr64(ctx
, fp1
, ft
);
10440 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
10441 tcg_temp_free_i64(fp1
);
10442 gen_store_fpr64(ctx
, fp0
, fd
);
10443 tcg_temp_free_i64(fp0
);
10447 check_cp1_registers(ctx
, fs
| ft
| fd
);
10449 TCGv_i64 fp0
= tcg_temp_new_i64();
10450 TCGv_i64 fp1
= tcg_temp_new_i64();
10452 gen_load_fpr64(ctx
, fp0
, fs
);
10453 gen_load_fpr64(ctx
, fp1
, ft
);
10454 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
10455 tcg_temp_free_i64(fp1
);
10456 gen_store_fpr64(ctx
, fp0
, fd
);
10457 tcg_temp_free_i64(fp0
);
10461 check_cp1_registers(ctx
, fs
| ft
| fd
);
10463 TCGv_i64 fp0
= tcg_temp_new_i64();
10464 TCGv_i64 fp1
= tcg_temp_new_i64();
10466 gen_load_fpr64(ctx
, fp0
, fs
);
10467 gen_load_fpr64(ctx
, fp1
, ft
);
10468 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
10469 tcg_temp_free_i64(fp1
);
10470 gen_store_fpr64(ctx
, fp0
, fd
);
10471 tcg_temp_free_i64(fp0
);
10475 check_cp1_registers(ctx
, fs
| ft
| fd
);
10477 TCGv_i64 fp0
= tcg_temp_new_i64();
10478 TCGv_i64 fp1
= tcg_temp_new_i64();
10480 gen_load_fpr64(ctx
, fp0
, fs
);
10481 gen_load_fpr64(ctx
, fp1
, ft
);
10482 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
10483 tcg_temp_free_i64(fp1
);
10484 gen_store_fpr64(ctx
, fp0
, fd
);
10485 tcg_temp_free_i64(fp0
);
10489 check_cp1_registers(ctx
, fs
| fd
);
10491 TCGv_i64 fp0
= tcg_temp_new_i64();
10493 gen_load_fpr64(ctx
, fp0
, fs
);
10494 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
10495 gen_store_fpr64(ctx
, fp0
, fd
);
10496 tcg_temp_free_i64(fp0
);
10500 check_cp1_registers(ctx
, fs
| fd
);
10502 TCGv_i64 fp0
= tcg_temp_new_i64();
10504 gen_load_fpr64(ctx
, fp0
, fs
);
10505 if (ctx
->abs2008
) {
10506 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
10508 gen_helper_float_abs_d(fp0
, fp0
);
10510 gen_store_fpr64(ctx
, fp0
, fd
);
10511 tcg_temp_free_i64(fp0
);
10515 check_cp1_registers(ctx
, fs
| fd
);
10517 TCGv_i64 fp0
= tcg_temp_new_i64();
10519 gen_load_fpr64(ctx
, fp0
, fs
);
10520 gen_store_fpr64(ctx
, fp0
, fd
);
10521 tcg_temp_free_i64(fp0
);
10525 check_cp1_registers(ctx
, fs
| fd
);
10527 TCGv_i64 fp0
= tcg_temp_new_i64();
10529 gen_load_fpr64(ctx
, fp0
, fs
);
10530 if (ctx
->abs2008
) {
10531 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
10533 gen_helper_float_chs_d(fp0
, fp0
);
10535 gen_store_fpr64(ctx
, fp0
, fd
);
10536 tcg_temp_free_i64(fp0
);
10539 case OPC_ROUND_L_D
:
10540 check_cp1_64bitmode(ctx
);
10542 TCGv_i64 fp0
= tcg_temp_new_i64();
10544 gen_load_fpr64(ctx
, fp0
, fs
);
10545 if (ctx
->nan2008
) {
10546 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
10548 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
10550 gen_store_fpr64(ctx
, fp0
, fd
);
10551 tcg_temp_free_i64(fp0
);
10554 case OPC_TRUNC_L_D
:
10555 check_cp1_64bitmode(ctx
);
10557 TCGv_i64 fp0
= tcg_temp_new_i64();
10559 gen_load_fpr64(ctx
, fp0
, fs
);
10560 if (ctx
->nan2008
) {
10561 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
10563 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
10565 gen_store_fpr64(ctx
, fp0
, fd
);
10566 tcg_temp_free_i64(fp0
);
10570 check_cp1_64bitmode(ctx
);
10572 TCGv_i64 fp0
= tcg_temp_new_i64();
10574 gen_load_fpr64(ctx
, fp0
, fs
);
10575 if (ctx
->nan2008
) {
10576 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
10578 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
10580 gen_store_fpr64(ctx
, fp0
, fd
);
10581 tcg_temp_free_i64(fp0
);
10584 case OPC_FLOOR_L_D
:
10585 check_cp1_64bitmode(ctx
);
10587 TCGv_i64 fp0
= tcg_temp_new_i64();
10589 gen_load_fpr64(ctx
, fp0
, fs
);
10590 if (ctx
->nan2008
) {
10591 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
10593 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
10595 gen_store_fpr64(ctx
, fp0
, fd
);
10596 tcg_temp_free_i64(fp0
);
10599 case OPC_ROUND_W_D
:
10600 check_cp1_registers(ctx
, fs
);
10602 TCGv_i32 fp32
= tcg_temp_new_i32();
10603 TCGv_i64 fp64
= tcg_temp_new_i64();
10605 gen_load_fpr64(ctx
, fp64
, fs
);
10606 if (ctx
->nan2008
) {
10607 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
10609 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
10611 tcg_temp_free_i64(fp64
);
10612 gen_store_fpr32(ctx
, fp32
, fd
);
10613 tcg_temp_free_i32(fp32
);
10616 case OPC_TRUNC_W_D
:
10617 check_cp1_registers(ctx
, fs
);
10619 TCGv_i32 fp32
= tcg_temp_new_i32();
10620 TCGv_i64 fp64
= tcg_temp_new_i64();
10622 gen_load_fpr64(ctx
, fp64
, fs
);
10623 if (ctx
->nan2008
) {
10624 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
10626 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
10628 tcg_temp_free_i64(fp64
);
10629 gen_store_fpr32(ctx
, fp32
, fd
);
10630 tcg_temp_free_i32(fp32
);
10634 check_cp1_registers(ctx
, fs
);
10636 TCGv_i32 fp32
= tcg_temp_new_i32();
10637 TCGv_i64 fp64
= tcg_temp_new_i64();
10639 gen_load_fpr64(ctx
, fp64
, fs
);
10640 if (ctx
->nan2008
) {
10641 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
10643 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
10645 tcg_temp_free_i64(fp64
);
10646 gen_store_fpr32(ctx
, fp32
, fd
);
10647 tcg_temp_free_i32(fp32
);
10650 case OPC_FLOOR_W_D
:
10651 check_cp1_registers(ctx
, fs
);
10653 TCGv_i32 fp32
= tcg_temp_new_i32();
10654 TCGv_i64 fp64
= tcg_temp_new_i64();
10656 gen_load_fpr64(ctx
, fp64
, fs
);
10657 if (ctx
->nan2008
) {
10658 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
10660 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
10662 tcg_temp_free_i64(fp64
);
10663 gen_store_fpr32(ctx
, fp32
, fd
);
10664 tcg_temp_free_i32(fp32
);
10668 check_insn(ctx
, ISA_MIPS_R6
);
10669 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10672 check_insn(ctx
, ISA_MIPS_R6
);
10673 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10676 check_insn(ctx
, ISA_MIPS_R6
);
10677 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
10680 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10681 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10684 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10686 TCGLabel
*l1
= gen_new_label();
10690 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10692 fp0
= tcg_temp_new_i64();
10693 gen_load_fpr64(ctx
, fp0
, fs
);
10694 gen_store_fpr64(ctx
, fp0
, fd
);
10695 tcg_temp_free_i64(fp0
);
10700 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10702 TCGLabel
*l1
= gen_new_label();
10706 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10707 fp0
= tcg_temp_new_i64();
10708 gen_load_fpr64(ctx
, fp0
, fs
);
10709 gen_store_fpr64(ctx
, fp0
, fd
);
10710 tcg_temp_free_i64(fp0
);
10716 check_cp1_registers(ctx
, fs
| fd
);
10718 TCGv_i64 fp0
= tcg_temp_new_i64();
10720 gen_load_fpr64(ctx
, fp0
, fs
);
10721 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
10722 gen_store_fpr64(ctx
, fp0
, fd
);
10723 tcg_temp_free_i64(fp0
);
10727 check_cp1_registers(ctx
, fs
| fd
);
10729 TCGv_i64 fp0
= tcg_temp_new_i64();
10731 gen_load_fpr64(ctx
, fp0
, fs
);
10732 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
10733 gen_store_fpr64(ctx
, fp0
, fd
);
10734 tcg_temp_free_i64(fp0
);
10738 check_insn(ctx
, ISA_MIPS_R6
);
10740 TCGv_i64 fp0
= tcg_temp_new_i64();
10741 TCGv_i64 fp1
= tcg_temp_new_i64();
10742 TCGv_i64 fp2
= tcg_temp_new_i64();
10743 gen_load_fpr64(ctx
, fp0
, fs
);
10744 gen_load_fpr64(ctx
, fp1
, ft
);
10745 gen_load_fpr64(ctx
, fp2
, fd
);
10746 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10747 gen_store_fpr64(ctx
, fp2
, fd
);
10748 tcg_temp_free_i64(fp2
);
10749 tcg_temp_free_i64(fp1
);
10750 tcg_temp_free_i64(fp0
);
10754 check_insn(ctx
, ISA_MIPS_R6
);
10756 TCGv_i64 fp0
= tcg_temp_new_i64();
10757 TCGv_i64 fp1
= tcg_temp_new_i64();
10758 TCGv_i64 fp2
= tcg_temp_new_i64();
10759 gen_load_fpr64(ctx
, fp0
, fs
);
10760 gen_load_fpr64(ctx
, fp1
, ft
);
10761 gen_load_fpr64(ctx
, fp2
, fd
);
10762 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10763 gen_store_fpr64(ctx
, fp2
, fd
);
10764 tcg_temp_free_i64(fp2
);
10765 tcg_temp_free_i64(fp1
);
10766 tcg_temp_free_i64(fp0
);
10770 check_insn(ctx
, ISA_MIPS_R6
);
10772 TCGv_i64 fp0
= tcg_temp_new_i64();
10773 gen_load_fpr64(ctx
, fp0
, fs
);
10774 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
10775 gen_store_fpr64(ctx
, fp0
, fd
);
10776 tcg_temp_free_i64(fp0
);
10780 check_insn(ctx
, ISA_MIPS_R6
);
10782 TCGv_i64 fp0
= tcg_temp_new_i64();
10783 gen_load_fpr64(ctx
, fp0
, fs
);
10784 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
10785 gen_store_fpr64(ctx
, fp0
, fd
);
10786 tcg_temp_free_i64(fp0
);
10789 case OPC_MIN_D
: /* OPC_RECIP2_D */
10790 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10792 TCGv_i64 fp0
= tcg_temp_new_i64();
10793 TCGv_i64 fp1
= tcg_temp_new_i64();
10794 gen_load_fpr64(ctx
, fp0
, fs
);
10795 gen_load_fpr64(ctx
, fp1
, ft
);
10796 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
10797 gen_store_fpr64(ctx
, fp1
, fd
);
10798 tcg_temp_free_i64(fp1
);
10799 tcg_temp_free_i64(fp0
);
10802 check_cp1_64bitmode(ctx
);
10804 TCGv_i64 fp0
= tcg_temp_new_i64();
10805 TCGv_i64 fp1
= tcg_temp_new_i64();
10807 gen_load_fpr64(ctx
, fp0
, fs
);
10808 gen_load_fpr64(ctx
, fp1
, ft
);
10809 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
10810 tcg_temp_free_i64(fp1
);
10811 gen_store_fpr64(ctx
, fp0
, fd
);
10812 tcg_temp_free_i64(fp0
);
10816 case OPC_MINA_D
: /* OPC_RECIP1_D */
10817 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10819 TCGv_i64 fp0
= tcg_temp_new_i64();
10820 TCGv_i64 fp1
= tcg_temp_new_i64();
10821 gen_load_fpr64(ctx
, fp0
, fs
);
10822 gen_load_fpr64(ctx
, fp1
, ft
);
10823 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
10824 gen_store_fpr64(ctx
, fp1
, fd
);
10825 tcg_temp_free_i64(fp1
);
10826 tcg_temp_free_i64(fp0
);
10829 check_cp1_64bitmode(ctx
);
10831 TCGv_i64 fp0
= tcg_temp_new_i64();
10833 gen_load_fpr64(ctx
, fp0
, fs
);
10834 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
10835 gen_store_fpr64(ctx
, fp0
, fd
);
10836 tcg_temp_free_i64(fp0
);
10840 case OPC_MAX_D
: /* OPC_RSQRT1_D */
10841 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10843 TCGv_i64 fp0
= tcg_temp_new_i64();
10844 TCGv_i64 fp1
= tcg_temp_new_i64();
10845 gen_load_fpr64(ctx
, fp0
, fs
);
10846 gen_load_fpr64(ctx
, fp1
, ft
);
10847 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
10848 gen_store_fpr64(ctx
, fp1
, fd
);
10849 tcg_temp_free_i64(fp1
);
10850 tcg_temp_free_i64(fp0
);
10853 check_cp1_64bitmode(ctx
);
10855 TCGv_i64 fp0
= tcg_temp_new_i64();
10857 gen_load_fpr64(ctx
, fp0
, fs
);
10858 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
10859 gen_store_fpr64(ctx
, fp0
, fd
);
10860 tcg_temp_free_i64(fp0
);
10864 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
10865 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
10867 TCGv_i64 fp0
= tcg_temp_new_i64();
10868 TCGv_i64 fp1
= tcg_temp_new_i64();
10869 gen_load_fpr64(ctx
, fp0
, fs
);
10870 gen_load_fpr64(ctx
, fp1
, ft
);
10871 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
10872 gen_store_fpr64(ctx
, fp1
, fd
);
10873 tcg_temp_free_i64(fp1
);
10874 tcg_temp_free_i64(fp0
);
10877 check_cp1_64bitmode(ctx
);
10879 TCGv_i64 fp0
= tcg_temp_new_i64();
10880 TCGv_i64 fp1
= tcg_temp_new_i64();
10882 gen_load_fpr64(ctx
, fp0
, fs
);
10883 gen_load_fpr64(ctx
, fp1
, ft
);
10884 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
10885 tcg_temp_free_i64(fp1
);
10886 gen_store_fpr64(ctx
, fp0
, fd
);
10887 tcg_temp_free_i64(fp0
);
10894 case OPC_CMP_UEQ_D
:
10895 case OPC_CMP_OLT_D
:
10896 case OPC_CMP_ULT_D
:
10897 case OPC_CMP_OLE_D
:
10898 case OPC_CMP_ULE_D
:
10900 case OPC_CMP_NGLE_D
:
10901 case OPC_CMP_SEQ_D
:
10902 case OPC_CMP_NGL_D
:
10904 case OPC_CMP_NGE_D
:
10906 case OPC_CMP_NGT_D
:
10907 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
10908 if (ctx
->opcode
& (1 << 6)) {
10909 gen_cmpabs_d(ctx
, func
- 48, ft
, fs
, cc
);
10911 gen_cmp_d(ctx
, func
- 48, ft
, fs
, cc
);
10915 check_cp1_registers(ctx
, fs
);
10917 TCGv_i32 fp32
= tcg_temp_new_i32();
10918 TCGv_i64 fp64
= tcg_temp_new_i64();
10920 gen_load_fpr64(ctx
, fp64
, fs
);
10921 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
10922 tcg_temp_free_i64(fp64
);
10923 gen_store_fpr32(ctx
, fp32
, fd
);
10924 tcg_temp_free_i32(fp32
);
10928 check_cp1_registers(ctx
, fs
);
10930 TCGv_i32 fp32
= tcg_temp_new_i32();
10931 TCGv_i64 fp64
= tcg_temp_new_i64();
10933 gen_load_fpr64(ctx
, fp64
, fs
);
10934 if (ctx
->nan2008
) {
10935 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
10937 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
10939 tcg_temp_free_i64(fp64
);
10940 gen_store_fpr32(ctx
, fp32
, fd
);
10941 tcg_temp_free_i32(fp32
);
10945 check_cp1_64bitmode(ctx
);
10947 TCGv_i64 fp0
= tcg_temp_new_i64();
10949 gen_load_fpr64(ctx
, fp0
, fs
);
10950 if (ctx
->nan2008
) {
10951 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
10953 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
10955 gen_store_fpr64(ctx
, fp0
, fd
);
10956 tcg_temp_free_i64(fp0
);
10961 TCGv_i32 fp0
= tcg_temp_new_i32();
10963 gen_load_fpr32(ctx
, fp0
, fs
);
10964 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
10965 gen_store_fpr32(ctx
, fp0
, fd
);
10966 tcg_temp_free_i32(fp0
);
10970 check_cp1_registers(ctx
, fd
);
10972 TCGv_i32 fp32
= tcg_temp_new_i32();
10973 TCGv_i64 fp64
= tcg_temp_new_i64();
10975 gen_load_fpr32(ctx
, fp32
, fs
);
10976 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
10977 tcg_temp_free_i32(fp32
);
10978 gen_store_fpr64(ctx
, fp64
, fd
);
10979 tcg_temp_free_i64(fp64
);
10983 check_cp1_64bitmode(ctx
);
10985 TCGv_i32 fp32
= tcg_temp_new_i32();
10986 TCGv_i64 fp64
= tcg_temp_new_i64();
10988 gen_load_fpr64(ctx
, fp64
, fs
);
10989 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
10990 tcg_temp_free_i64(fp64
);
10991 gen_store_fpr32(ctx
, fp32
, fd
);
10992 tcg_temp_free_i32(fp32
);
10996 check_cp1_64bitmode(ctx
);
10998 TCGv_i64 fp0
= tcg_temp_new_i64();
11000 gen_load_fpr64(ctx
, fp0
, fs
);
11001 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
11002 gen_store_fpr64(ctx
, fp0
, fd
);
11003 tcg_temp_free_i64(fp0
);
11006 case OPC_CVT_PS_PW
:
11009 TCGv_i64 fp0
= tcg_temp_new_i64();
11011 gen_load_fpr64(ctx
, fp0
, fs
);
11012 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
11013 gen_store_fpr64(ctx
, fp0
, fd
);
11014 tcg_temp_free_i64(fp0
);
11020 TCGv_i64 fp0
= tcg_temp_new_i64();
11021 TCGv_i64 fp1
= tcg_temp_new_i64();
11023 gen_load_fpr64(ctx
, fp0
, fs
);
11024 gen_load_fpr64(ctx
, fp1
, ft
);
11025 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
11026 tcg_temp_free_i64(fp1
);
11027 gen_store_fpr64(ctx
, fp0
, fd
);
11028 tcg_temp_free_i64(fp0
);
11034 TCGv_i64 fp0
= tcg_temp_new_i64();
11035 TCGv_i64 fp1
= tcg_temp_new_i64();
11037 gen_load_fpr64(ctx
, fp0
, fs
);
11038 gen_load_fpr64(ctx
, fp1
, ft
);
11039 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
11040 tcg_temp_free_i64(fp1
);
11041 gen_store_fpr64(ctx
, fp0
, fd
);
11042 tcg_temp_free_i64(fp0
);
11048 TCGv_i64 fp0
= tcg_temp_new_i64();
11049 TCGv_i64 fp1
= tcg_temp_new_i64();
11051 gen_load_fpr64(ctx
, fp0
, fs
);
11052 gen_load_fpr64(ctx
, fp1
, ft
);
11053 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
11054 tcg_temp_free_i64(fp1
);
11055 gen_store_fpr64(ctx
, fp0
, fd
);
11056 tcg_temp_free_i64(fp0
);
11062 TCGv_i64 fp0
= tcg_temp_new_i64();
11064 gen_load_fpr64(ctx
, fp0
, fs
);
11065 gen_helper_float_abs_ps(fp0
, fp0
);
11066 gen_store_fpr64(ctx
, fp0
, fd
);
11067 tcg_temp_free_i64(fp0
);
11073 TCGv_i64 fp0
= tcg_temp_new_i64();
11075 gen_load_fpr64(ctx
, fp0
, fs
);
11076 gen_store_fpr64(ctx
, fp0
, fd
);
11077 tcg_temp_free_i64(fp0
);
11083 TCGv_i64 fp0
= tcg_temp_new_i64();
11085 gen_load_fpr64(ctx
, fp0
, fs
);
11086 gen_helper_float_chs_ps(fp0
, fp0
);
11087 gen_store_fpr64(ctx
, fp0
, fd
);
11088 tcg_temp_free_i64(fp0
);
11093 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11098 TCGLabel
*l1
= gen_new_label();
11102 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11104 fp0
= tcg_temp_new_i64();
11105 gen_load_fpr64(ctx
, fp0
, fs
);
11106 gen_store_fpr64(ctx
, fp0
, fd
);
11107 tcg_temp_free_i64(fp0
);
11114 TCGLabel
*l1
= gen_new_label();
11118 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11119 fp0
= tcg_temp_new_i64();
11120 gen_load_fpr64(ctx
, fp0
, fs
);
11121 gen_store_fpr64(ctx
, fp0
, fd
);
11122 tcg_temp_free_i64(fp0
);
11130 TCGv_i64 fp0
= tcg_temp_new_i64();
11131 TCGv_i64 fp1
= tcg_temp_new_i64();
11133 gen_load_fpr64(ctx
, fp0
, ft
);
11134 gen_load_fpr64(ctx
, fp1
, fs
);
11135 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
11136 tcg_temp_free_i64(fp1
);
11137 gen_store_fpr64(ctx
, fp0
, fd
);
11138 tcg_temp_free_i64(fp0
);
11144 TCGv_i64 fp0
= tcg_temp_new_i64();
11145 TCGv_i64 fp1
= tcg_temp_new_i64();
11147 gen_load_fpr64(ctx
, fp0
, ft
);
11148 gen_load_fpr64(ctx
, fp1
, fs
);
11149 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
11150 tcg_temp_free_i64(fp1
);
11151 gen_store_fpr64(ctx
, fp0
, fd
);
11152 tcg_temp_free_i64(fp0
);
11155 case OPC_RECIP2_PS
:
11158 TCGv_i64 fp0
= tcg_temp_new_i64();
11159 TCGv_i64 fp1
= tcg_temp_new_i64();
11161 gen_load_fpr64(ctx
, fp0
, fs
);
11162 gen_load_fpr64(ctx
, fp1
, ft
);
11163 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
11164 tcg_temp_free_i64(fp1
);
11165 gen_store_fpr64(ctx
, fp0
, fd
);
11166 tcg_temp_free_i64(fp0
);
11169 case OPC_RECIP1_PS
:
11172 TCGv_i64 fp0
= tcg_temp_new_i64();
11174 gen_load_fpr64(ctx
, fp0
, fs
);
11175 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
11176 gen_store_fpr64(ctx
, fp0
, fd
);
11177 tcg_temp_free_i64(fp0
);
11180 case OPC_RSQRT1_PS
:
11183 TCGv_i64 fp0
= tcg_temp_new_i64();
11185 gen_load_fpr64(ctx
, fp0
, fs
);
11186 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
11187 gen_store_fpr64(ctx
, fp0
, fd
);
11188 tcg_temp_free_i64(fp0
);
11191 case OPC_RSQRT2_PS
:
11194 TCGv_i64 fp0
= tcg_temp_new_i64();
11195 TCGv_i64 fp1
= tcg_temp_new_i64();
11197 gen_load_fpr64(ctx
, fp0
, fs
);
11198 gen_load_fpr64(ctx
, fp1
, ft
);
11199 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
11200 tcg_temp_free_i64(fp1
);
11201 gen_store_fpr64(ctx
, fp0
, fd
);
11202 tcg_temp_free_i64(fp0
);
11206 check_cp1_64bitmode(ctx
);
11208 TCGv_i32 fp0
= tcg_temp_new_i32();
11210 gen_load_fpr32h(ctx
, fp0
, fs
);
11211 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
11212 gen_store_fpr32(ctx
, fp0
, fd
);
11213 tcg_temp_free_i32(fp0
);
11216 case OPC_CVT_PW_PS
:
11219 TCGv_i64 fp0
= tcg_temp_new_i64();
11221 gen_load_fpr64(ctx
, fp0
, fs
);
11222 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
11223 gen_store_fpr64(ctx
, fp0
, fd
);
11224 tcg_temp_free_i64(fp0
);
11228 check_cp1_64bitmode(ctx
);
11230 TCGv_i32 fp0
= tcg_temp_new_i32();
11232 gen_load_fpr32(ctx
, fp0
, fs
);
11233 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
11234 gen_store_fpr32(ctx
, fp0
, fd
);
11235 tcg_temp_free_i32(fp0
);
11241 TCGv_i32 fp0
= tcg_temp_new_i32();
11242 TCGv_i32 fp1
= tcg_temp_new_i32();
11244 gen_load_fpr32(ctx
, fp0
, fs
);
11245 gen_load_fpr32(ctx
, fp1
, ft
);
11246 gen_store_fpr32h(ctx
, fp0
, fd
);
11247 gen_store_fpr32(ctx
, fp1
, fd
);
11248 tcg_temp_free_i32(fp0
);
11249 tcg_temp_free_i32(fp1
);
11255 TCGv_i32 fp0
= tcg_temp_new_i32();
11256 TCGv_i32 fp1
= tcg_temp_new_i32();
11258 gen_load_fpr32(ctx
, fp0
, fs
);
11259 gen_load_fpr32h(ctx
, fp1
, ft
);
11260 gen_store_fpr32(ctx
, fp1
, fd
);
11261 gen_store_fpr32h(ctx
, fp0
, fd
);
11262 tcg_temp_free_i32(fp0
);
11263 tcg_temp_free_i32(fp1
);
11269 TCGv_i32 fp0
= tcg_temp_new_i32();
11270 TCGv_i32 fp1
= tcg_temp_new_i32();
11272 gen_load_fpr32h(ctx
, fp0
, fs
);
11273 gen_load_fpr32(ctx
, fp1
, ft
);
11274 gen_store_fpr32(ctx
, fp1
, fd
);
11275 gen_store_fpr32h(ctx
, fp0
, fd
);
11276 tcg_temp_free_i32(fp0
);
11277 tcg_temp_free_i32(fp1
);
11283 TCGv_i32 fp0
= tcg_temp_new_i32();
11284 TCGv_i32 fp1
= tcg_temp_new_i32();
11286 gen_load_fpr32h(ctx
, fp0
, fs
);
11287 gen_load_fpr32h(ctx
, fp1
, ft
);
11288 gen_store_fpr32(ctx
, fp1
, fd
);
11289 gen_store_fpr32h(ctx
, fp0
, fd
);
11290 tcg_temp_free_i32(fp0
);
11291 tcg_temp_free_i32(fp1
);
11295 case OPC_CMP_UN_PS
:
11296 case OPC_CMP_EQ_PS
:
11297 case OPC_CMP_UEQ_PS
:
11298 case OPC_CMP_OLT_PS
:
11299 case OPC_CMP_ULT_PS
:
11300 case OPC_CMP_OLE_PS
:
11301 case OPC_CMP_ULE_PS
:
11302 case OPC_CMP_SF_PS
:
11303 case OPC_CMP_NGLE_PS
:
11304 case OPC_CMP_SEQ_PS
:
11305 case OPC_CMP_NGL_PS
:
11306 case OPC_CMP_LT_PS
:
11307 case OPC_CMP_NGE_PS
:
11308 case OPC_CMP_LE_PS
:
11309 case OPC_CMP_NGT_PS
:
11310 if (ctx
->opcode
& (1 << 6)) {
11311 gen_cmpabs_ps(ctx
, func
- 48, ft
, fs
, cc
);
11313 gen_cmp_ps(ctx
, func
- 48, ft
, fs
, cc
);
11317 MIPS_INVAL("farith");
11318 gen_reserved_instruction(ctx
);
11323 /* Coprocessor 3 (FPU) */
11324 static void gen_flt3_ldst(DisasContext
*ctx
, uint32_t opc
,
11325 int fd
, int fs
, int base
, int index
)
11327 TCGv t0
= tcg_temp_new();
11330 gen_load_gpr(t0
, index
);
11331 } else if (index
== 0) {
11332 gen_load_gpr(t0
, base
);
11334 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
11337 * Don't do NOP if destination is zero: we must perform the actual
11344 TCGv_i32 fp0
= tcg_temp_new_i32();
11346 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
11347 tcg_gen_trunc_tl_i32(fp0
, t0
);
11348 gen_store_fpr32(ctx
, fp0
, fd
);
11349 tcg_temp_free_i32(fp0
);
11354 check_cp1_registers(ctx
, fd
);
11356 TCGv_i64 fp0
= tcg_temp_new_i64();
11357 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11358 gen_store_fpr64(ctx
, fp0
, fd
);
11359 tcg_temp_free_i64(fp0
);
11363 check_cp1_64bitmode(ctx
);
11364 tcg_gen_andi_tl(t0
, t0
, ~0x7);
11366 TCGv_i64 fp0
= tcg_temp_new_i64();
11368 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11369 gen_store_fpr64(ctx
, fp0
, fd
);
11370 tcg_temp_free_i64(fp0
);
11376 TCGv_i32 fp0
= tcg_temp_new_i32();
11377 gen_load_fpr32(ctx
, fp0
, fs
);
11378 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
11379 tcg_temp_free_i32(fp0
);
11384 check_cp1_registers(ctx
, fs
);
11386 TCGv_i64 fp0
= tcg_temp_new_i64();
11387 gen_load_fpr64(ctx
, fp0
, fs
);
11388 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11389 tcg_temp_free_i64(fp0
);
11393 check_cp1_64bitmode(ctx
);
11394 tcg_gen_andi_tl(t0
, t0
, ~0x7);
11396 TCGv_i64 fp0
= tcg_temp_new_i64();
11397 gen_load_fpr64(ctx
, fp0
, fs
);
11398 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
11399 tcg_temp_free_i64(fp0
);
11406 static void gen_flt3_arith(DisasContext
*ctx
, uint32_t opc
,
11407 int fd
, int fr
, int fs
, int ft
)
11413 TCGv t0
= tcg_temp_local_new();
11414 TCGv_i32 fp
= tcg_temp_new_i32();
11415 TCGv_i32 fph
= tcg_temp_new_i32();
11416 TCGLabel
*l1
= gen_new_label();
11417 TCGLabel
*l2
= gen_new_label();
11419 gen_load_gpr(t0
, fr
);
11420 tcg_gen_andi_tl(t0
, t0
, 0x7);
11422 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
11423 gen_load_fpr32(ctx
, fp
, fs
);
11424 gen_load_fpr32h(ctx
, fph
, fs
);
11425 gen_store_fpr32(ctx
, fp
, fd
);
11426 gen_store_fpr32h(ctx
, fph
, fd
);
11429 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
11431 if (cpu_is_bigendian(ctx
)) {
11432 gen_load_fpr32(ctx
, fp
, fs
);
11433 gen_load_fpr32h(ctx
, fph
, ft
);
11434 gen_store_fpr32h(ctx
, fp
, fd
);
11435 gen_store_fpr32(ctx
, fph
, fd
);
11437 gen_load_fpr32h(ctx
, fph
, fs
);
11438 gen_load_fpr32(ctx
, fp
, ft
);
11439 gen_store_fpr32(ctx
, fph
, fd
);
11440 gen_store_fpr32h(ctx
, fp
, fd
);
11443 tcg_temp_free_i32(fp
);
11444 tcg_temp_free_i32(fph
);
11450 TCGv_i32 fp0
= tcg_temp_new_i32();
11451 TCGv_i32 fp1
= tcg_temp_new_i32();
11452 TCGv_i32 fp2
= tcg_temp_new_i32();
11454 gen_load_fpr32(ctx
, fp0
, fs
);
11455 gen_load_fpr32(ctx
, fp1
, ft
);
11456 gen_load_fpr32(ctx
, fp2
, fr
);
11457 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11458 tcg_temp_free_i32(fp0
);
11459 tcg_temp_free_i32(fp1
);
11460 gen_store_fpr32(ctx
, fp2
, fd
);
11461 tcg_temp_free_i32(fp2
);
11466 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11468 TCGv_i64 fp0
= tcg_temp_new_i64();
11469 TCGv_i64 fp1
= tcg_temp_new_i64();
11470 TCGv_i64 fp2
= tcg_temp_new_i64();
11472 gen_load_fpr64(ctx
, fp0
, fs
);
11473 gen_load_fpr64(ctx
, fp1
, ft
);
11474 gen_load_fpr64(ctx
, fp2
, fr
);
11475 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11476 tcg_temp_free_i64(fp0
);
11477 tcg_temp_free_i64(fp1
);
11478 gen_store_fpr64(ctx
, fp2
, fd
);
11479 tcg_temp_free_i64(fp2
);
11485 TCGv_i64 fp0
= tcg_temp_new_i64();
11486 TCGv_i64 fp1
= tcg_temp_new_i64();
11487 TCGv_i64 fp2
= tcg_temp_new_i64();
11489 gen_load_fpr64(ctx
, fp0
, fs
);
11490 gen_load_fpr64(ctx
, fp1
, ft
);
11491 gen_load_fpr64(ctx
, fp2
, fr
);
11492 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11493 tcg_temp_free_i64(fp0
);
11494 tcg_temp_free_i64(fp1
);
11495 gen_store_fpr64(ctx
, fp2
, fd
);
11496 tcg_temp_free_i64(fp2
);
11502 TCGv_i32 fp0
= tcg_temp_new_i32();
11503 TCGv_i32 fp1
= tcg_temp_new_i32();
11504 TCGv_i32 fp2
= tcg_temp_new_i32();
11506 gen_load_fpr32(ctx
, fp0
, fs
);
11507 gen_load_fpr32(ctx
, fp1
, ft
);
11508 gen_load_fpr32(ctx
, fp2
, fr
);
11509 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11510 tcg_temp_free_i32(fp0
);
11511 tcg_temp_free_i32(fp1
);
11512 gen_store_fpr32(ctx
, fp2
, fd
);
11513 tcg_temp_free_i32(fp2
);
11518 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11520 TCGv_i64 fp0
= tcg_temp_new_i64();
11521 TCGv_i64 fp1
= tcg_temp_new_i64();
11522 TCGv_i64 fp2
= tcg_temp_new_i64();
11524 gen_load_fpr64(ctx
, fp0
, fs
);
11525 gen_load_fpr64(ctx
, fp1
, ft
);
11526 gen_load_fpr64(ctx
, fp2
, fr
);
11527 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11528 tcg_temp_free_i64(fp0
);
11529 tcg_temp_free_i64(fp1
);
11530 gen_store_fpr64(ctx
, fp2
, fd
);
11531 tcg_temp_free_i64(fp2
);
11537 TCGv_i64 fp0
= tcg_temp_new_i64();
11538 TCGv_i64 fp1
= tcg_temp_new_i64();
11539 TCGv_i64 fp2
= tcg_temp_new_i64();
11541 gen_load_fpr64(ctx
, fp0
, fs
);
11542 gen_load_fpr64(ctx
, fp1
, ft
);
11543 gen_load_fpr64(ctx
, fp2
, fr
);
11544 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11545 tcg_temp_free_i64(fp0
);
11546 tcg_temp_free_i64(fp1
);
11547 gen_store_fpr64(ctx
, fp2
, fd
);
11548 tcg_temp_free_i64(fp2
);
11554 TCGv_i32 fp0
= tcg_temp_new_i32();
11555 TCGv_i32 fp1
= tcg_temp_new_i32();
11556 TCGv_i32 fp2
= tcg_temp_new_i32();
11558 gen_load_fpr32(ctx
, fp0
, fs
);
11559 gen_load_fpr32(ctx
, fp1
, ft
);
11560 gen_load_fpr32(ctx
, fp2
, fr
);
11561 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11562 tcg_temp_free_i32(fp0
);
11563 tcg_temp_free_i32(fp1
);
11564 gen_store_fpr32(ctx
, fp2
, fd
);
11565 tcg_temp_free_i32(fp2
);
11570 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11572 TCGv_i64 fp0
= tcg_temp_new_i64();
11573 TCGv_i64 fp1
= tcg_temp_new_i64();
11574 TCGv_i64 fp2
= tcg_temp_new_i64();
11576 gen_load_fpr64(ctx
, fp0
, fs
);
11577 gen_load_fpr64(ctx
, fp1
, ft
);
11578 gen_load_fpr64(ctx
, fp2
, fr
);
11579 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11580 tcg_temp_free_i64(fp0
);
11581 tcg_temp_free_i64(fp1
);
11582 gen_store_fpr64(ctx
, fp2
, fd
);
11583 tcg_temp_free_i64(fp2
);
11589 TCGv_i64 fp0
= tcg_temp_new_i64();
11590 TCGv_i64 fp1
= tcg_temp_new_i64();
11591 TCGv_i64 fp2
= tcg_temp_new_i64();
11593 gen_load_fpr64(ctx
, fp0
, fs
);
11594 gen_load_fpr64(ctx
, fp1
, ft
);
11595 gen_load_fpr64(ctx
, fp2
, fr
);
11596 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11597 tcg_temp_free_i64(fp0
);
11598 tcg_temp_free_i64(fp1
);
11599 gen_store_fpr64(ctx
, fp2
, fd
);
11600 tcg_temp_free_i64(fp2
);
11606 TCGv_i32 fp0
= tcg_temp_new_i32();
11607 TCGv_i32 fp1
= tcg_temp_new_i32();
11608 TCGv_i32 fp2
= tcg_temp_new_i32();
11610 gen_load_fpr32(ctx
, fp0
, fs
);
11611 gen_load_fpr32(ctx
, fp1
, ft
);
11612 gen_load_fpr32(ctx
, fp2
, fr
);
11613 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11614 tcg_temp_free_i32(fp0
);
11615 tcg_temp_free_i32(fp1
);
11616 gen_store_fpr32(ctx
, fp2
, fd
);
11617 tcg_temp_free_i32(fp2
);
11622 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
11624 TCGv_i64 fp0
= tcg_temp_new_i64();
11625 TCGv_i64 fp1
= tcg_temp_new_i64();
11626 TCGv_i64 fp2
= tcg_temp_new_i64();
11628 gen_load_fpr64(ctx
, fp0
, fs
);
11629 gen_load_fpr64(ctx
, fp1
, ft
);
11630 gen_load_fpr64(ctx
, fp2
, fr
);
11631 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11632 tcg_temp_free_i64(fp0
);
11633 tcg_temp_free_i64(fp1
);
11634 gen_store_fpr64(ctx
, fp2
, fd
);
11635 tcg_temp_free_i64(fp2
);
11641 TCGv_i64 fp0
= tcg_temp_new_i64();
11642 TCGv_i64 fp1
= tcg_temp_new_i64();
11643 TCGv_i64 fp2
= tcg_temp_new_i64();
11645 gen_load_fpr64(ctx
, fp0
, fs
);
11646 gen_load_fpr64(ctx
, fp1
, ft
);
11647 gen_load_fpr64(ctx
, fp2
, fr
);
11648 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11649 tcg_temp_free_i64(fp0
);
11650 tcg_temp_free_i64(fp1
);
11651 gen_store_fpr64(ctx
, fp2
, fd
);
11652 tcg_temp_free_i64(fp2
);
11656 MIPS_INVAL("flt3_arith");
11657 gen_reserved_instruction(ctx
);
11662 void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
11666 #if !defined(CONFIG_USER_ONLY)
11668 * The Linux kernel will emulate rdhwr if it's not supported natively.
11669 * Therefore only check the ISA in system mode.
11671 check_insn(ctx
, ISA_MIPS_R2
);
11673 t0
= tcg_temp_new();
11677 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
11678 gen_store_gpr(t0
, rt
);
11681 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
11682 gen_store_gpr(t0
, rt
);
11685 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
11688 gen_helper_rdhwr_cc(t0
, cpu_env
);
11689 gen_store_gpr(t0
, rt
);
11691 * Break the TB to be able to take timer interrupts immediately
11692 * after reading count. DISAS_STOP isn't sufficient, we need to ensure
11693 * we break completely out of translated code.
11695 gen_save_pc(ctx
->base
.pc_next
+ 4);
11696 ctx
->base
.is_jmp
= DISAS_EXIT
;
11699 gen_helper_rdhwr_ccres(t0
, cpu_env
);
11700 gen_store_gpr(t0
, rt
);
11703 check_insn(ctx
, ISA_MIPS_R6
);
11706 * Performance counter registers are not implemented other than
11707 * control register 0.
11709 generate_exception(ctx
, EXCP_RI
);
11711 gen_helper_rdhwr_performance(t0
, cpu_env
);
11712 gen_store_gpr(t0
, rt
);
11715 check_insn(ctx
, ISA_MIPS_R6
);
11716 gen_helper_rdhwr_xnp(t0
, cpu_env
);
11717 gen_store_gpr(t0
, rt
);
11720 #if defined(CONFIG_USER_ONLY)
11721 tcg_gen_ld_tl(t0
, cpu_env
,
11722 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
11723 gen_store_gpr(t0
, rt
);
11726 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
11727 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
11728 tcg_gen_ld_tl(t0
, cpu_env
,
11729 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
11730 gen_store_gpr(t0
, rt
);
11732 gen_reserved_instruction(ctx
);
11736 default: /* Invalid */
11737 MIPS_INVAL("rdhwr");
11738 gen_reserved_instruction(ctx
);
11744 static inline void clear_branch_hflags(DisasContext
*ctx
)
11746 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
11747 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
11748 save_cpu_state(ctx
, 0);
11751 * It is not safe to save ctx->hflags as hflags may be changed
11752 * in execution time by the instruction in delay / forbidden slot.
11754 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
11758 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
11760 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11761 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
11762 /* Branches completion */
11763 clear_branch_hflags(ctx
);
11764 ctx
->base
.is_jmp
= DISAS_NORETURN
;
11765 /* FIXME: Need to clear can_do_io. */
11766 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
11767 case MIPS_HFLAG_FBNSLOT
:
11768 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
11771 /* unconditional branch */
11772 if (proc_hflags
& MIPS_HFLAG_BX
) {
11773 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
11775 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11777 case MIPS_HFLAG_BL
:
11778 /* blikely taken case */
11779 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11781 case MIPS_HFLAG_BC
:
11782 /* Conditional branch */
11784 TCGLabel
*l1
= gen_new_label();
11786 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
11787 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
11789 gen_goto_tb(ctx
, 0, ctx
->btarget
);
11792 case MIPS_HFLAG_BR
:
11793 /* unconditional branch to register */
11794 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
11795 TCGv t0
= tcg_temp_new();
11796 TCGv_i32 t1
= tcg_temp_new_i32();
11798 tcg_gen_andi_tl(t0
, btarget
, 0x1);
11799 tcg_gen_trunc_tl_i32(t1
, t0
);
11801 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
11802 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
11803 tcg_gen_or_i32(hflags
, hflags
, t1
);
11804 tcg_temp_free_i32(t1
);
11806 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
11808 tcg_gen_mov_tl(cpu_PC
, btarget
);
11810 tcg_gen_lookup_and_goto_ptr();
11813 LOG_DISAS("unknown branch 0x%x\n", proc_hflags
);
11814 gen_reserved_instruction(ctx
);
11819 /* Compact Branches */
11820 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
11821 int rs
, int rt
, int32_t offset
)
11823 int bcond_compute
= 0;
11824 TCGv t0
= tcg_temp_new();
11825 TCGv t1
= tcg_temp_new();
11826 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
11828 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
11829 #ifdef MIPS_DEBUG_DISAS
11830 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
11831 "\n", ctx
->base
.pc_next
);
11833 gen_reserved_instruction(ctx
);
11837 /* Load needed operands and calculate btarget */
11839 /* compact branch */
11840 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11841 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11842 gen_load_gpr(t0
, rs
);
11843 gen_load_gpr(t1
, rt
);
11845 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11846 if (rs
<= rt
&& rs
== 0) {
11847 /* OPC_BEQZALC, OPC_BNEZALC */
11848 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11851 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11852 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11853 gen_load_gpr(t0
, rs
);
11854 gen_load_gpr(t1
, rt
);
11856 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11858 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11859 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11860 if (rs
== 0 || rs
== rt
) {
11861 /* OPC_BLEZALC, OPC_BGEZALC */
11862 /* OPC_BGTZALC, OPC_BLTZALC */
11863 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11865 gen_load_gpr(t0
, rs
);
11866 gen_load_gpr(t1
, rt
);
11868 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11872 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11877 /* OPC_BEQZC, OPC_BNEZC */
11878 gen_load_gpr(t0
, rs
);
11880 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
11882 /* OPC_JIC, OPC_JIALC */
11883 TCGv tbase
= tcg_temp_new();
11884 TCGv toffset
= tcg_constant_tl(offset
);
11886 gen_load_gpr(tbase
, rt
);
11887 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
11888 tcg_temp_free(tbase
);
11892 MIPS_INVAL("Compact branch/jump");
11893 gen_reserved_instruction(ctx
);
11897 if (bcond_compute
== 0) {
11898 /* Unconditional compact branch */
11901 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11904 ctx
->hflags
|= MIPS_HFLAG_BR
;
11907 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
11910 ctx
->hflags
|= MIPS_HFLAG_B
;
11913 MIPS_INVAL("Compact branch/jump");
11914 gen_reserved_instruction(ctx
);
11918 /* Generating branch here as compact branches don't have delay slot */
11919 gen_branch(ctx
, 4);
11921 /* Conditional compact branch */
11922 TCGLabel
*fs
= gen_new_label();
11923 save_cpu_state(ctx
, 0);
11926 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
11927 if (rs
== 0 && rt
!= 0) {
11929 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11930 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11932 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11935 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
11938 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
11939 if (rs
== 0 && rt
!= 0) {
11941 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11942 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11944 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11947 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
11950 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
11951 if (rs
== 0 && rt
!= 0) {
11953 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
11954 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11956 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
11959 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
11962 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
11963 if (rs
== 0 && rt
!= 0) {
11965 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
11966 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
11968 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
11971 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
11974 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
11975 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
11977 /* OPC_BOVC, OPC_BNVC */
11978 TCGv t2
= tcg_temp_new();
11979 TCGv t3
= tcg_temp_new();
11980 TCGv t4
= tcg_temp_new();
11981 TCGv input_overflow
= tcg_temp_new();
11983 gen_load_gpr(t0
, rs
);
11984 gen_load_gpr(t1
, rt
);
11985 tcg_gen_ext32s_tl(t2
, t0
);
11986 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
11987 tcg_gen_ext32s_tl(t3
, t1
);
11988 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
11989 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
11991 tcg_gen_add_tl(t4
, t2
, t3
);
11992 tcg_gen_ext32s_tl(t4
, t4
);
11993 tcg_gen_xor_tl(t2
, t2
, t3
);
11994 tcg_gen_xor_tl(t3
, t4
, t3
);
11995 tcg_gen_andc_tl(t2
, t3
, t2
);
11996 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
11997 tcg_gen_or_tl(t4
, t4
, input_overflow
);
11998 if (opc
== OPC_BOVC
) {
12000 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
12003 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
12005 tcg_temp_free(input_overflow
);
12009 } else if (rs
< rt
&& rs
== 0) {
12010 /* OPC_BEQZALC, OPC_BNEZALC */
12011 if (opc
== OPC_BEQZALC
) {
12013 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
12016 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
12019 /* OPC_BEQC, OPC_BNEC */
12020 if (opc
== OPC_BEQC
) {
12022 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
12025 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
12030 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
12033 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
12036 MIPS_INVAL("Compact conditional branch/jump");
12037 gen_reserved_instruction(ctx
);
12041 /* Generating branch here as compact branches don't have delay slot */
12042 gen_goto_tb(ctx
, 1, ctx
->btarget
);
12045 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
12053 void gen_addiupc(DisasContext
*ctx
, int rx
, int imm
,
12054 int is_64_bit
, int extended
)
12058 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
12059 gen_reserved_instruction(ctx
);
12063 t0
= tcg_temp_new();
12065 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
12066 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
12068 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12074 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
12077 TCGv_i32 t0
= tcg_const_i32(op
);
12078 TCGv t1
= tcg_temp_new();
12079 gen_base_offset_addr(ctx
, t1
, base
, offset
);
12080 gen_helper_cache(cpu_env
, t1
, t0
);
12082 tcg_temp_free_i32(t0
);
12085 static inline bool is_uhi(DisasContext
*ctx
, int sdbbp_code
)
12087 #ifdef CONFIG_USER_ONLY
12090 bool is_user
= (ctx
->hflags
& MIPS_HFLAG_KSU
) == MIPS_HFLAG_UM
;
12091 return semihosting_enabled(is_user
) && sdbbp_code
== 1;
12095 void gen_ldxs(DisasContext
*ctx
, int base
, int index
, int rd
)
12097 TCGv t0
= tcg_temp_new();
12098 TCGv t1
= tcg_temp_new();
12100 gen_load_gpr(t0
, base
);
12103 gen_load_gpr(t1
, index
);
12104 tcg_gen_shli_tl(t1
, t1
, 2);
12105 gen_op_addr_add(ctx
, t0
, t1
, t0
);
12108 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
12109 gen_store_gpr(t1
, rd
);
12115 static void gen_sync(int stype
)
12117 TCGBar tcg_mo
= TCG_BAR_SC
;
12120 case 0x4: /* SYNC_WMB */
12121 tcg_mo
|= TCG_MO_ST_ST
;
12123 case 0x10: /* SYNC_MB */
12124 tcg_mo
|= TCG_MO_ALL
;
12126 case 0x11: /* SYNC_ACQUIRE */
12127 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
12129 case 0x12: /* SYNC_RELEASE */
12130 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
12132 case 0x13: /* SYNC_RMB */
12133 tcg_mo
|= TCG_MO_LD_LD
;
12136 tcg_mo
|= TCG_MO_ALL
;
12140 tcg_gen_mb(tcg_mo
);
12143 /* ISA extensions (ASEs) */
12145 /* MIPS16 extension to MIPS32 */
12146 #include "mips16e_translate.c.inc"
12148 /* microMIPS extension to MIPS32/MIPS64 */
12151 * Values for microMIPS fmt field. Variable-width, depending on which
12152 * formats the instruction supports.
12171 #include "micromips_translate.c.inc"
12173 #include "nanomips_translate.c.inc"
12175 /* MIPSDSP functions. */
12177 /* Indexed load is not for DSP only */
12178 static void gen_mips_lx(DisasContext
*ctx
, uint32_t opc
,
12179 int rd
, int base
, int offset
)
12183 if (!(ctx
->insn_flags
& INSN_OCTEON
)) {
12186 t0
= tcg_temp_new();
12189 gen_load_gpr(t0
, offset
);
12190 } else if (offset
== 0) {
12191 gen_load_gpr(t0
, base
);
12193 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
12198 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
12199 gen_store_gpr(t0
, rd
);
12202 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
12203 gen_store_gpr(t0
, rd
);
12206 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12207 gen_store_gpr(t0
, rd
);
12209 #if defined(TARGET_MIPS64)
12211 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUQ
);
12212 gen_store_gpr(t0
, rd
);
12219 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
12220 int ret
, int v1
, int v2
)
12226 /* Treat as NOP. */
12230 v1_t
= tcg_temp_new();
12231 v2_t
= tcg_temp_new();
12233 gen_load_gpr(v1_t
, v1
);
12234 gen_load_gpr(v2_t
, v2
);
12237 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
12238 case OPC_MULT_G_2E
:
12242 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12244 case OPC_ADDUH_R_QB
:
12245 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12248 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12250 case OPC_ADDQH_R_PH
:
12251 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12254 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12256 case OPC_ADDQH_R_W
:
12257 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12260 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12262 case OPC_SUBUH_R_QB
:
12263 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12266 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12268 case OPC_SUBQH_R_PH
:
12269 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12272 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12274 case OPC_SUBQH_R_W
:
12275 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12279 case OPC_ABSQ_S_PH_DSP
:
12281 case OPC_ABSQ_S_QB
:
12283 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
12285 case OPC_ABSQ_S_PH
:
12287 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
12291 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
12293 case OPC_PRECEQ_W_PHL
:
12295 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
12296 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
12298 case OPC_PRECEQ_W_PHR
:
12300 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
12301 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
12302 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
12304 case OPC_PRECEQU_PH_QBL
:
12306 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
12308 case OPC_PRECEQU_PH_QBR
:
12310 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
12312 case OPC_PRECEQU_PH_QBLA
:
12314 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
12316 case OPC_PRECEQU_PH_QBRA
:
12318 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
12320 case OPC_PRECEU_PH_QBL
:
12322 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
12324 case OPC_PRECEU_PH_QBR
:
12326 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
12328 case OPC_PRECEU_PH_QBLA
:
12330 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
12332 case OPC_PRECEU_PH_QBRA
:
12334 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
12338 case OPC_ADDU_QB_DSP
:
12342 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12344 case OPC_ADDQ_S_PH
:
12346 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12350 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12354 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12356 case OPC_ADDU_S_QB
:
12358 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12362 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12364 case OPC_ADDU_S_PH
:
12366 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12370 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12372 case OPC_SUBQ_S_PH
:
12374 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12378 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12382 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12384 case OPC_SUBU_S_QB
:
12386 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12390 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12392 case OPC_SUBU_S_PH
:
12394 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12398 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12402 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12406 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
12408 case OPC_RADDU_W_QB
:
12410 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
12414 case OPC_CMPU_EQ_QB_DSP
:
12416 case OPC_PRECR_QB_PH
:
12418 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12420 case OPC_PRECRQ_QB_PH
:
12422 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12424 case OPC_PRECR_SRA_PH_W
:
12427 TCGv_i32 sa_t
= tcg_const_i32(v2
);
12428 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
12430 tcg_temp_free_i32(sa_t
);
12433 case OPC_PRECR_SRA_R_PH_W
:
12436 TCGv_i32 sa_t
= tcg_const_i32(v2
);
12437 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
12439 tcg_temp_free_i32(sa_t
);
12442 case OPC_PRECRQ_PH_W
:
12444 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12446 case OPC_PRECRQ_RS_PH_W
:
12448 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12450 case OPC_PRECRQU_S_QB_PH
:
12452 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12456 #ifdef TARGET_MIPS64
12457 case OPC_ABSQ_S_QH_DSP
:
12459 case OPC_PRECEQ_L_PWL
:
12461 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
12463 case OPC_PRECEQ_L_PWR
:
12465 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
12467 case OPC_PRECEQ_PW_QHL
:
12469 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
12471 case OPC_PRECEQ_PW_QHR
:
12473 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
12475 case OPC_PRECEQ_PW_QHLA
:
12477 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
12479 case OPC_PRECEQ_PW_QHRA
:
12481 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
12483 case OPC_PRECEQU_QH_OBL
:
12485 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
12487 case OPC_PRECEQU_QH_OBR
:
12489 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
12491 case OPC_PRECEQU_QH_OBLA
:
12493 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
12495 case OPC_PRECEQU_QH_OBRA
:
12497 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
12499 case OPC_PRECEU_QH_OBL
:
12501 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
12503 case OPC_PRECEU_QH_OBR
:
12505 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
12507 case OPC_PRECEU_QH_OBLA
:
12509 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
12511 case OPC_PRECEU_QH_OBRA
:
12513 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
12515 case OPC_ABSQ_S_OB
:
12517 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
12519 case OPC_ABSQ_S_PW
:
12521 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
12523 case OPC_ABSQ_S_QH
:
12525 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
12529 case OPC_ADDU_OB_DSP
:
12531 case OPC_RADDU_L_OB
:
12533 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
12537 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12539 case OPC_SUBQ_S_PW
:
12541 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12545 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12547 case OPC_SUBQ_S_QH
:
12549 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12553 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12555 case OPC_SUBU_S_OB
:
12557 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12561 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12563 case OPC_SUBU_S_QH
:
12565 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12569 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12571 case OPC_SUBUH_R_OB
:
12573 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12577 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12579 case OPC_ADDQ_S_PW
:
12581 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12585 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12587 case OPC_ADDQ_S_QH
:
12589 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12593 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12595 case OPC_ADDU_S_OB
:
12597 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12601 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12603 case OPC_ADDU_S_QH
:
12605 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12609 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12611 case OPC_ADDUH_R_OB
:
12613 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
12617 case OPC_CMPU_EQ_OB_DSP
:
12619 case OPC_PRECR_OB_QH
:
12621 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
12623 case OPC_PRECR_SRA_QH_PW
:
12626 TCGv_i32 ret_t
= tcg_const_i32(ret
);
12627 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
12628 tcg_temp_free_i32(ret_t
);
12631 case OPC_PRECR_SRA_R_QH_PW
:
12634 TCGv_i32 sa_v
= tcg_const_i32(ret
);
12635 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
12636 tcg_temp_free_i32(sa_v
);
12639 case OPC_PRECRQ_OB_QH
:
12641 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
12643 case OPC_PRECRQ_PW_L
:
12645 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
12647 case OPC_PRECRQ_QH_PW
:
12649 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
12651 case OPC_PRECRQ_RS_QH_PW
:
12653 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12655 case OPC_PRECRQU_S_OB_QH
:
12657 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12664 tcg_temp_free(v1_t
);
12665 tcg_temp_free(v2_t
);
12668 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
12669 int ret
, int v1
, int v2
)
12677 /* Treat as NOP. */
12681 t0
= tcg_temp_new();
12682 v1_t
= tcg_temp_new();
12683 v2_t
= tcg_temp_new();
12685 tcg_gen_movi_tl(t0
, v1
);
12686 gen_load_gpr(v1_t
, v1
);
12687 gen_load_gpr(v2_t
, v2
);
12690 case OPC_SHLL_QB_DSP
:
12692 op2
= MASK_SHLL_QB(ctx
->opcode
);
12696 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12700 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12704 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12708 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12710 case OPC_SHLL_S_PH
:
12712 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12714 case OPC_SHLLV_S_PH
:
12716 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12720 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
12722 case OPC_SHLLV_S_W
:
12724 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12728 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
12732 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12736 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
12740 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12744 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
12746 case OPC_SHRA_R_QB
:
12748 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
12752 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12754 case OPC_SHRAV_R_QB
:
12756 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
12760 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
12762 case OPC_SHRA_R_PH
:
12764 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
12768 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12770 case OPC_SHRAV_R_PH
:
12772 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
12776 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
12778 case OPC_SHRAV_R_W
:
12780 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
12782 default: /* Invalid */
12783 MIPS_INVAL("MASK SHLL.QB");
12784 gen_reserved_instruction(ctx
);
12789 #ifdef TARGET_MIPS64
12790 case OPC_SHLL_OB_DSP
:
12791 op2
= MASK_SHLL_OB(ctx
->opcode
);
12795 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12799 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12801 case OPC_SHLL_S_PW
:
12803 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12805 case OPC_SHLLV_S_PW
:
12807 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12811 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12815 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12819 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12823 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12825 case OPC_SHLL_S_QH
:
12827 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
12829 case OPC_SHLLV_S_QH
:
12831 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
12835 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
12839 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12841 case OPC_SHRA_R_OB
:
12843 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
12845 case OPC_SHRAV_R_OB
:
12847 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12851 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
12855 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
12857 case OPC_SHRA_R_PW
:
12859 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
12861 case OPC_SHRAV_R_PW
:
12863 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
12867 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
12871 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12873 case OPC_SHRA_R_QH
:
12875 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
12877 case OPC_SHRAV_R_QH
:
12879 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12883 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
12887 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
12891 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
12895 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
12897 default: /* Invalid */
12898 MIPS_INVAL("MASK SHLL.OB");
12899 gen_reserved_instruction(ctx
);
12907 tcg_temp_free(v1_t
);
12908 tcg_temp_free(v2_t
);
12911 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
12912 int ret
, int v1
, int v2
, int check_ret
)
12918 if ((ret
== 0) && (check_ret
== 1)) {
12919 /* Treat as NOP. */
12923 t0
= tcg_temp_new_i32();
12924 v1_t
= tcg_temp_new();
12925 v2_t
= tcg_temp_new();
12927 tcg_gen_movi_i32(t0
, ret
);
12928 gen_load_gpr(v1_t
, v1
);
12929 gen_load_gpr(v2_t
, v2
);
12933 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
12934 * the same mask and op1.
12936 case OPC_MULT_G_2E
:
12940 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12943 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12946 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12948 case OPC_MULQ_RS_W
:
12949 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
12953 case OPC_DPA_W_PH_DSP
:
12955 case OPC_DPAU_H_QBL
:
12957 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
12959 case OPC_DPAU_H_QBR
:
12961 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
12963 case OPC_DPSU_H_QBL
:
12965 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
12967 case OPC_DPSU_H_QBR
:
12969 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
12973 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12975 case OPC_DPAX_W_PH
:
12977 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12979 case OPC_DPAQ_S_W_PH
:
12981 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12983 case OPC_DPAQX_S_W_PH
:
12985 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12987 case OPC_DPAQX_SA_W_PH
:
12989 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12993 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12995 case OPC_DPSX_W_PH
:
12997 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
12999 case OPC_DPSQ_S_W_PH
:
13001 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13003 case OPC_DPSQX_S_W_PH
:
13005 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13007 case OPC_DPSQX_SA_W_PH
:
13009 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13011 case OPC_MULSAQ_S_W_PH
:
13013 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13015 case OPC_DPAQ_SA_L_W
:
13017 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
13019 case OPC_DPSQ_SA_L_W
:
13021 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
13023 case OPC_MAQ_S_W_PHL
:
13025 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
13027 case OPC_MAQ_S_W_PHR
:
13029 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
13031 case OPC_MAQ_SA_W_PHL
:
13033 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
13035 case OPC_MAQ_SA_W_PHR
:
13037 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
13039 case OPC_MULSA_W_PH
:
13041 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
13045 #ifdef TARGET_MIPS64
13046 case OPC_DPAQ_W_QH_DSP
:
13048 int ac
= ret
& 0x03;
13049 tcg_gen_movi_i32(t0
, ac
);
13054 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
13058 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
13062 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
13066 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
13070 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13072 case OPC_DPAQ_S_W_QH
:
13074 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13076 case OPC_DPAQ_SA_L_PW
:
13078 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13080 case OPC_DPAU_H_OBL
:
13082 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
13084 case OPC_DPAU_H_OBR
:
13086 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
13090 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13092 case OPC_DPSQ_S_W_QH
:
13094 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13096 case OPC_DPSQ_SA_L_PW
:
13098 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13100 case OPC_DPSU_H_OBL
:
13102 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
13104 case OPC_DPSU_H_OBR
:
13106 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
13108 case OPC_MAQ_S_L_PWL
:
13110 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
13112 case OPC_MAQ_S_L_PWR
:
13114 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
13116 case OPC_MAQ_S_W_QHLL
:
13118 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
13120 case OPC_MAQ_SA_W_QHLL
:
13122 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
13124 case OPC_MAQ_S_W_QHLR
:
13126 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
13128 case OPC_MAQ_SA_W_QHLR
:
13130 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
13132 case OPC_MAQ_S_W_QHRL
:
13134 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
13136 case OPC_MAQ_SA_W_QHRL
:
13138 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
13140 case OPC_MAQ_S_W_QHRR
:
13142 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
13144 case OPC_MAQ_SA_W_QHRR
:
13146 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
13148 case OPC_MULSAQ_S_L_PW
:
13150 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
13152 case OPC_MULSAQ_S_W_QH
:
13154 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
13160 case OPC_ADDU_QB_DSP
:
13162 case OPC_MULEU_S_PH_QBL
:
13164 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13166 case OPC_MULEU_S_PH_QBR
:
13168 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13170 case OPC_MULQ_RS_PH
:
13172 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13174 case OPC_MULEQ_S_W_PHL
:
13176 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13178 case OPC_MULEQ_S_W_PHR
:
13180 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13182 case OPC_MULQ_S_PH
:
13184 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13188 #ifdef TARGET_MIPS64
13189 case OPC_ADDU_OB_DSP
:
13191 case OPC_MULEQ_S_PW_QHL
:
13193 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13195 case OPC_MULEQ_S_PW_QHR
:
13197 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13199 case OPC_MULEU_S_QH_OBL
:
13201 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13203 case OPC_MULEU_S_QH_OBR
:
13205 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13207 case OPC_MULQ_RS_QH
:
13209 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13216 tcg_temp_free_i32(t0
);
13217 tcg_temp_free(v1_t
);
13218 tcg_temp_free(v2_t
);
13221 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
13229 /* Treat as NOP. */
13233 t0
= tcg_temp_new();
13234 val_t
= tcg_temp_new();
13235 gen_load_gpr(val_t
, val
);
13238 case OPC_ABSQ_S_PH_DSP
:
13242 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
13247 target_long result
;
13248 imm
= (ctx
->opcode
>> 16) & 0xFF;
13249 result
= (uint32_t)imm
<< 24 |
13250 (uint32_t)imm
<< 16 |
13251 (uint32_t)imm
<< 8 |
13253 result
= (int32_t)result
;
13254 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
13259 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
13260 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
13261 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13262 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13263 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13264 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
13269 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13270 imm
= (int16_t)(imm
<< 6) >> 6;
13271 tcg_gen_movi_tl(cpu_gpr
[ret
], \
13272 (target_long
)((int32_t)imm
<< 16 | \
13278 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
13279 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13280 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13281 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
13285 #ifdef TARGET_MIPS64
13286 case OPC_ABSQ_S_QH_DSP
:
13293 imm
= (ctx
->opcode
>> 16) & 0xFF;
13294 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
13295 temp
= (temp
<< 16) | temp
;
13296 temp
= (temp
<< 32) | temp
;
13297 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13305 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13306 imm
= (int16_t)(imm
<< 6) >> 6;
13307 temp
= ((target_long
)imm
<< 32) \
13308 | ((target_long
)imm
& 0xFFFFFFFF);
13309 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13317 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13318 imm
= (int16_t)(imm
<< 6) >> 6;
13320 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
13321 ((uint64_t)(uint16_t)imm
<< 32) |
13322 ((uint64_t)(uint16_t)imm
<< 16) |
13323 (uint64_t)(uint16_t)imm
;
13324 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
13329 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
13330 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
13331 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13332 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13333 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13334 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13335 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13339 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
13340 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13341 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13345 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
13346 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
13347 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13348 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
13349 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
13356 tcg_temp_free(val_t
);
13359 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
13360 uint32_t op1
, uint32_t op2
,
13361 int ret
, int v1
, int v2
, int check_ret
)
13367 if ((ret
== 0) && (check_ret
== 1)) {
13368 /* Treat as NOP. */
13372 t1
= tcg_temp_new();
13373 v1_t
= tcg_temp_new();
13374 v2_t
= tcg_temp_new();
13376 gen_load_gpr(v1_t
, v1
);
13377 gen_load_gpr(v2_t
, v2
);
13380 case OPC_CMPU_EQ_QB_DSP
:
13382 case OPC_CMPU_EQ_QB
:
13384 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
13386 case OPC_CMPU_LT_QB
:
13388 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
13390 case OPC_CMPU_LE_QB
:
13392 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
13394 case OPC_CMPGU_EQ_QB
:
13396 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13398 case OPC_CMPGU_LT_QB
:
13400 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13402 case OPC_CMPGU_LE_QB
:
13404 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
13406 case OPC_CMPGDU_EQ_QB
:
13408 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
13409 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13410 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13411 tcg_gen_shli_tl(t1
, t1
, 24);
13412 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13414 case OPC_CMPGDU_LT_QB
:
13416 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
13417 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13418 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13419 tcg_gen_shli_tl(t1
, t1
, 24);
13420 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13422 case OPC_CMPGDU_LE_QB
:
13424 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
13425 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
13426 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
13427 tcg_gen_shli_tl(t1
, t1
, 24);
13428 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
13430 case OPC_CMP_EQ_PH
:
13432 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
13434 case OPC_CMP_LT_PH
:
13436 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
13438 case OPC_CMP_LE_PH
:
13440 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
13444 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13448 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13450 case OPC_PACKRL_PH
:
13452 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
13456 #ifdef TARGET_MIPS64
13457 case OPC_CMPU_EQ_OB_DSP
:
13459 case OPC_CMP_EQ_PW
:
13461 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
13463 case OPC_CMP_LT_PW
:
13465 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
13467 case OPC_CMP_LE_PW
:
13469 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
13471 case OPC_CMP_EQ_QH
:
13473 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
13475 case OPC_CMP_LT_QH
:
13477 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
13479 case OPC_CMP_LE_QH
:
13481 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
13483 case OPC_CMPGDU_EQ_OB
:
13485 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13487 case OPC_CMPGDU_LT_OB
:
13489 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13491 case OPC_CMPGDU_LE_OB
:
13493 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13495 case OPC_CMPGU_EQ_OB
:
13497 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13499 case OPC_CMPGU_LT_OB
:
13501 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13503 case OPC_CMPGU_LE_OB
:
13505 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
13507 case OPC_CMPU_EQ_OB
:
13509 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
13511 case OPC_CMPU_LT_OB
:
13513 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
13515 case OPC_CMPU_LE_OB
:
13517 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
13519 case OPC_PACKRL_PW
:
13521 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
13525 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13529 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13533 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
13541 tcg_temp_free(v1_t
);
13542 tcg_temp_free(v2_t
);
13545 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
13546 uint32_t op1
, int rt
, int rs
, int sa
)
13553 /* Treat as NOP. */
13557 t0
= tcg_temp_new();
13558 gen_load_gpr(t0
, rs
);
13561 case OPC_APPEND_DSP
:
13562 switch (MASK_APPEND(ctx
->opcode
)) {
13565 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
13567 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13571 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13572 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
13573 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
13574 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13576 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13580 if (sa
!= 0 && sa
!= 2) {
13581 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
13582 tcg_gen_ext32u_tl(t0
, t0
);
13583 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
13584 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13586 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
13588 default: /* Invalid */
13589 MIPS_INVAL("MASK APPEND");
13590 gen_reserved_instruction(ctx
);
13594 #ifdef TARGET_MIPS64
13595 case OPC_DAPPEND_DSP
:
13596 switch (MASK_DAPPEND(ctx
->opcode
)) {
13599 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
13603 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
13604 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
13605 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
13609 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
13610 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
13611 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13616 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
13617 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
13618 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
13619 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
13622 default: /* Invalid */
13623 MIPS_INVAL("MASK DAPPEND");
13624 gen_reserved_instruction(ctx
);
13633 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
13634 int ret
, int v1
, int v2
, int check_ret
)
13642 if ((ret
== 0) && (check_ret
== 1)) {
13643 /* Treat as NOP. */
13647 t0
= tcg_temp_new();
13648 t1
= tcg_temp_new();
13649 v1_t
= tcg_temp_new();
13651 gen_load_gpr(v1_t
, v1
);
13654 case OPC_EXTR_W_DSP
:
13658 tcg_gen_movi_tl(t0
, v2
);
13659 tcg_gen_movi_tl(t1
, v1
);
13660 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13663 tcg_gen_movi_tl(t0
, v2
);
13664 tcg_gen_movi_tl(t1
, v1
);
13665 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13667 case OPC_EXTR_RS_W
:
13668 tcg_gen_movi_tl(t0
, v2
);
13669 tcg_gen_movi_tl(t1
, v1
);
13670 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13673 tcg_gen_movi_tl(t0
, v2
);
13674 tcg_gen_movi_tl(t1
, v1
);
13675 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13677 case OPC_EXTRV_S_H
:
13678 tcg_gen_movi_tl(t0
, v2
);
13679 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13682 tcg_gen_movi_tl(t0
, v2
);
13683 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13685 case OPC_EXTRV_R_W
:
13686 tcg_gen_movi_tl(t0
, v2
);
13687 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13689 case OPC_EXTRV_RS_W
:
13690 tcg_gen_movi_tl(t0
, v2
);
13691 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13694 tcg_gen_movi_tl(t0
, v2
);
13695 tcg_gen_movi_tl(t1
, v1
);
13696 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13699 tcg_gen_movi_tl(t0
, v2
);
13700 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13703 tcg_gen_movi_tl(t0
, v2
);
13704 tcg_gen_movi_tl(t1
, v1
);
13705 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13708 tcg_gen_movi_tl(t0
, v2
);
13709 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13712 imm
= (ctx
->opcode
>> 20) & 0x3F;
13713 tcg_gen_movi_tl(t0
, ret
);
13714 tcg_gen_movi_tl(t1
, imm
);
13715 gen_helper_shilo(t0
, t1
, cpu_env
);
13718 tcg_gen_movi_tl(t0
, ret
);
13719 gen_helper_shilo(t0
, v1_t
, cpu_env
);
13722 tcg_gen_movi_tl(t0
, ret
);
13723 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
13726 imm
= (ctx
->opcode
>> 11) & 0x3FF;
13727 tcg_gen_movi_tl(t0
, imm
);
13728 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
13731 imm
= (ctx
->opcode
>> 16) & 0x03FF;
13732 tcg_gen_movi_tl(t0
, imm
);
13733 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
13737 #ifdef TARGET_MIPS64
13738 case OPC_DEXTR_W_DSP
:
13742 tcg_gen_movi_tl(t0
, ret
);
13743 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
13747 int shift
= (ctx
->opcode
>> 19) & 0x7F;
13748 int ac
= (ctx
->opcode
>> 11) & 0x03;
13749 tcg_gen_movi_tl(t0
, shift
);
13750 tcg_gen_movi_tl(t1
, ac
);
13751 gen_helper_dshilo(t0
, t1
, cpu_env
);
13756 int ac
= (ctx
->opcode
>> 11) & 0x03;
13757 tcg_gen_movi_tl(t0
, ac
);
13758 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
13762 tcg_gen_movi_tl(t0
, v2
);
13763 tcg_gen_movi_tl(t1
, v1
);
13765 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13768 tcg_gen_movi_tl(t0
, v2
);
13769 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13772 tcg_gen_movi_tl(t0
, v2
);
13773 tcg_gen_movi_tl(t1
, v1
);
13774 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13777 tcg_gen_movi_tl(t0
, v2
);
13778 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13781 tcg_gen_movi_tl(t0
, v2
);
13782 tcg_gen_movi_tl(t1
, v1
);
13783 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13785 case OPC_DEXTR_R_L
:
13786 tcg_gen_movi_tl(t0
, v2
);
13787 tcg_gen_movi_tl(t1
, v1
);
13788 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13790 case OPC_DEXTR_RS_L
:
13791 tcg_gen_movi_tl(t0
, v2
);
13792 tcg_gen_movi_tl(t1
, v1
);
13793 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13796 tcg_gen_movi_tl(t0
, v2
);
13797 tcg_gen_movi_tl(t1
, v1
);
13798 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13800 case OPC_DEXTR_R_W
:
13801 tcg_gen_movi_tl(t0
, v2
);
13802 tcg_gen_movi_tl(t1
, v1
);
13803 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13805 case OPC_DEXTR_RS_W
:
13806 tcg_gen_movi_tl(t0
, v2
);
13807 tcg_gen_movi_tl(t1
, v1
);
13808 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13810 case OPC_DEXTR_S_H
:
13811 tcg_gen_movi_tl(t0
, v2
);
13812 tcg_gen_movi_tl(t1
, v1
);
13813 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
13815 case OPC_DEXTRV_S_H
:
13816 tcg_gen_movi_tl(t0
, v2
);
13817 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13820 tcg_gen_movi_tl(t0
, v2
);
13821 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13823 case OPC_DEXTRV_R_L
:
13824 tcg_gen_movi_tl(t0
, v2
);
13825 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13827 case OPC_DEXTRV_RS_L
:
13828 tcg_gen_movi_tl(t0
, v2
);
13829 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13832 tcg_gen_movi_tl(t0
, v2
);
13833 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13835 case OPC_DEXTRV_R_W
:
13836 tcg_gen_movi_tl(t0
, v2
);
13837 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13839 case OPC_DEXTRV_RS_W
:
13840 tcg_gen_movi_tl(t0
, v2
);
13841 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
13850 tcg_temp_free(v1_t
);
13853 /* End MIPSDSP functions. */
13855 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
13857 int rs
, rt
, rd
, sa
;
13860 rs
= (ctx
->opcode
>> 21) & 0x1f;
13861 rt
= (ctx
->opcode
>> 16) & 0x1f;
13862 rd
= (ctx
->opcode
>> 11) & 0x1f;
13863 sa
= (ctx
->opcode
>> 6) & 0x1f;
13865 op1
= MASK_SPECIAL(ctx
->opcode
);
13871 op2
= MASK_R6_MULDIV(ctx
->opcode
);
13881 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
13884 MIPS_INVAL("special_r6 muldiv");
13885 gen_reserved_instruction(ctx
);
13891 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
13895 if (rt
== 0 && sa
== 1) {
13897 * Major opcode and function field is shared with preR6 MFHI/MTHI.
13898 * We need additionally to check other fields.
13900 gen_cl(ctx
, op1
, rd
, rs
);
13902 gen_reserved_instruction(ctx
);
13906 if (is_uhi(ctx
, extract32(ctx
->opcode
, 6, 20))) {
13907 ctx
->base
.is_jmp
= DISAS_SEMIHOST
;
13909 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13910 gen_reserved_instruction(ctx
);
13912 generate_exception_end(ctx
, EXCP_DBp
);
13916 #if defined(TARGET_MIPS64)
13919 if (rt
== 0 && sa
== 1) {
13921 * Major opcode and function field is shared with preR6 MFHI/MTHI.
13922 * We need additionally to check other fields.
13924 check_mips_64(ctx
);
13925 gen_cl(ctx
, op1
, rd
, rs
);
13927 gen_reserved_instruction(ctx
);
13935 op2
= MASK_R6_MULDIV(ctx
->opcode
);
13945 check_mips_64(ctx
);
13946 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
13949 MIPS_INVAL("special_r6 muldiv");
13950 gen_reserved_instruction(ctx
);
13955 default: /* Invalid */
13956 MIPS_INVAL("special_r6");
13957 gen_reserved_instruction(ctx
);
13962 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
13964 int rs
= extract32(ctx
->opcode
, 21, 5);
13965 int rt
= extract32(ctx
->opcode
, 16, 5);
13966 int rd
= extract32(ctx
->opcode
, 11, 5);
13967 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
13970 case OPC_MOVN
: /* Conditional move */
13972 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
13974 case OPC_MFHI
: /* Move from HI/LO */
13976 gen_HILO(ctx
, op1
, 0, rd
);
13979 case OPC_MTLO
: /* Move to HI/LO */
13980 gen_HILO(ctx
, op1
, 0, rs
);
13984 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
13988 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
13990 #if defined(TARGET_MIPS64)
13995 check_insn_opc_user_only(ctx
, INSN_R5900
);
13996 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14000 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
14002 default: /* Invalid */
14003 MIPS_INVAL("special_tx79");
14004 gen_reserved_instruction(ctx
);
14009 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
14014 rs
= (ctx
->opcode
>> 21) & 0x1f;
14015 rt
= (ctx
->opcode
>> 16) & 0x1f;
14016 rd
= (ctx
->opcode
>> 11) & 0x1f;
14018 op1
= MASK_SPECIAL(ctx
->opcode
);
14020 case OPC_MOVN
: /* Conditional move */
14022 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
|
14023 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
14024 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
14026 case OPC_MFHI
: /* Move from HI/LO */
14028 gen_HILO(ctx
, op1
, rs
& 3, rd
);
14031 case OPC_MTLO
: /* Move to HI/LO */
14032 gen_HILO(ctx
, op1
, rd
& 3, rs
);
14035 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
14036 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14037 check_cp1_enabled(ctx
);
14038 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
14039 (ctx
->opcode
>> 16) & 1);
14041 generate_exception_err(ctx
, EXCP_CpU
, 1);
14046 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
14050 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14052 #if defined(TARGET_MIPS64)
14057 check_insn(ctx
, ISA_MIPS3
);
14058 check_mips_64(ctx
);
14059 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
14063 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
14066 #ifdef MIPS_STRICT_STANDARD
14067 MIPS_INVAL("SPIM");
14068 gen_reserved_instruction(ctx
);
14070 /* Implemented as RI exception for now. */
14071 MIPS_INVAL("spim (unofficial)");
14072 gen_reserved_instruction(ctx
);
14075 default: /* Invalid */
14076 MIPS_INVAL("special_legacy");
14077 gen_reserved_instruction(ctx
);
14082 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
14084 int rs
, rt
, rd
, sa
;
14087 rs
= (ctx
->opcode
>> 21) & 0x1f;
14088 rt
= (ctx
->opcode
>> 16) & 0x1f;
14089 rd
= (ctx
->opcode
>> 11) & 0x1f;
14090 sa
= (ctx
->opcode
>> 6) & 0x1f;
14092 op1
= MASK_SPECIAL(ctx
->opcode
);
14094 case OPC_SLL
: /* Shift with immediate */
14095 if (sa
== 5 && rd
== 0 &&
14096 rs
== 0 && rt
== 0) { /* PAUSE */
14097 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
14098 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
14099 gen_reserved_instruction(ctx
);
14105 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14108 switch ((ctx
->opcode
>> 21) & 0x1f) {
14110 /* rotr is decoded as srl on non-R2 CPUs */
14111 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14116 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14119 gen_reserved_instruction(ctx
);
14127 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14129 case OPC_SLLV
: /* Shifts */
14131 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14134 switch ((ctx
->opcode
>> 6) & 0x1f) {
14136 /* rotrv is decoded as srlv on non-R2 CPUs */
14137 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14142 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14145 gen_reserved_instruction(ctx
);
14149 case OPC_SLT
: /* Set on less than */
14151 gen_slt(ctx
, op1
, rd
, rs
, rt
);
14153 case OPC_AND
: /* Logic*/
14157 gen_logic(ctx
, op1
, rd
, rs
, rt
);
14160 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
14162 case OPC_TGE
: /* Traps */
14168 check_insn(ctx
, ISA_MIPS2
);
14169 gen_trap(ctx
, op1
, rs
, rt
, -1, extract32(ctx
->opcode
, 6, 10));
14172 /* Pmon entry point, also R4010 selsl */
14173 #ifdef MIPS_STRICT_STANDARD
14174 MIPS_INVAL("PMON / selsl");
14175 gen_reserved_instruction(ctx
);
14177 gen_helper_pmon(cpu_env
, tcg_constant_i32(sa
));
14181 generate_exception_end(ctx
, EXCP_SYSCALL
);
14184 generate_exception_break(ctx
, extract32(ctx
->opcode
, 6, 20));
14187 check_insn(ctx
, ISA_MIPS2
);
14188 gen_sync(extract32(ctx
->opcode
, 6, 5));
14191 #if defined(TARGET_MIPS64)
14192 /* MIPS64 specific opcodes */
14197 check_insn(ctx
, ISA_MIPS3
);
14198 check_mips_64(ctx
);
14199 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14202 switch ((ctx
->opcode
>> 21) & 0x1f) {
14204 /* drotr is decoded as dsrl on non-R2 CPUs */
14205 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14210 check_insn(ctx
, ISA_MIPS3
);
14211 check_mips_64(ctx
);
14212 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14215 gen_reserved_instruction(ctx
);
14220 switch ((ctx
->opcode
>> 21) & 0x1f) {
14222 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
14223 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14228 check_insn(ctx
, ISA_MIPS3
);
14229 check_mips_64(ctx
);
14230 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
14233 gen_reserved_instruction(ctx
);
14241 check_insn(ctx
, ISA_MIPS3
);
14242 check_mips_64(ctx
);
14243 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14247 check_insn(ctx
, ISA_MIPS3
);
14248 check_mips_64(ctx
);
14249 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14252 switch ((ctx
->opcode
>> 6) & 0x1f) {
14254 /* drotrv is decoded as dsrlv on non-R2 CPUs */
14255 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
14260 check_insn(ctx
, ISA_MIPS3
);
14261 check_mips_64(ctx
);
14262 gen_shift(ctx
, op1
, rd
, rs
, rt
);
14265 gen_reserved_instruction(ctx
);
14271 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
14272 decode_opc_special_r6(env
, ctx
);
14273 } else if (ctx
->insn_flags
& INSN_R5900
) {
14274 decode_opc_special_tx79(env
, ctx
);
14276 decode_opc_special_legacy(env
, ctx
);
14282 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
14287 rs
= (ctx
->opcode
>> 21) & 0x1f;
14288 rt
= (ctx
->opcode
>> 16) & 0x1f;
14289 rd
= (ctx
->opcode
>> 11) & 0x1f;
14291 op1
= MASK_SPECIAL2(ctx
->opcode
);
14293 case OPC_MADD
: /* Multiply and add/sub */
14297 check_insn(ctx
, ISA_MIPS_R1
);
14298 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
14301 gen_arith(ctx
, op1
, rd
, rs
, rt
);
14304 case OPC_DIVU_G_2F
:
14305 case OPC_MULT_G_2F
:
14306 case OPC_MULTU_G_2F
:
14308 case OPC_MODU_G_2F
:
14309 check_insn(ctx
, INSN_LOONGSON2F
| ASE_LEXT
);
14310 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14314 check_insn(ctx
, ISA_MIPS_R1
);
14315 gen_cl(ctx
, op1
, rd
, rs
);
14318 if (is_uhi(ctx
, extract32(ctx
->opcode
, 6, 20))) {
14319 ctx
->base
.is_jmp
= DISAS_SEMIHOST
;
14322 * XXX: not clear which exception should be raised
14323 * when in debug mode...
14325 check_insn(ctx
, ISA_MIPS_R1
);
14326 generate_exception_end(ctx
, EXCP_DBp
);
14329 #if defined(TARGET_MIPS64)
14332 check_insn(ctx
, ISA_MIPS_R1
);
14333 check_mips_64(ctx
);
14334 gen_cl(ctx
, op1
, rd
, rs
);
14336 case OPC_DMULT_G_2F
:
14337 case OPC_DMULTU_G_2F
:
14338 case OPC_DDIV_G_2F
:
14339 case OPC_DDIVU_G_2F
:
14340 case OPC_DMOD_G_2F
:
14341 case OPC_DMODU_G_2F
:
14342 check_insn(ctx
, INSN_LOONGSON2F
| ASE_LEXT
);
14343 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14346 default: /* Invalid */
14347 MIPS_INVAL("special2_legacy");
14348 gen_reserved_instruction(ctx
);
14353 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
14355 int rs
, rt
, rd
, sa
;
14359 rs
= (ctx
->opcode
>> 21) & 0x1f;
14360 rt
= (ctx
->opcode
>> 16) & 0x1f;
14361 rd
= (ctx
->opcode
>> 11) & 0x1f;
14362 sa
= (ctx
->opcode
>> 6) & 0x1f;
14363 imm
= (int16_t)ctx
->opcode
>> 7;
14365 op1
= MASK_SPECIAL3(ctx
->opcode
);
14369 /* hint codes 24-31 are reserved and signal RI */
14370 gen_reserved_instruction(ctx
);
14372 /* Treat as NOP. */
14375 check_cp0_enabled(ctx
);
14376 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
14377 gen_cache_operation(ctx
, rt
, rs
, imm
);
14381 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
14384 gen_ld(ctx
, op1
, rt
, rs
, imm
);
14389 /* Treat as NOP. */
14392 op2
= MASK_BSHFL(ctx
->opcode
);
14398 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
14401 gen_bitswap(ctx
, op2
, rd
, rt
);
14406 #ifndef CONFIG_USER_ONLY
14408 if (unlikely(ctx
->gi
<= 1)) {
14409 gen_reserved_instruction(ctx
);
14411 check_cp0_enabled(ctx
);
14412 switch ((ctx
->opcode
>> 6) & 3) {
14413 case 0: /* GINVI */
14414 /* Treat as NOP. */
14416 case 2: /* GINVT */
14417 gen_helper_0e1i(ginvt
, cpu_gpr
[rs
], extract32(ctx
->opcode
, 8, 2));
14420 gen_reserved_instruction(ctx
);
14425 #if defined(TARGET_MIPS64)
14427 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEUQ
, false);
14430 gen_ld(ctx
, op1
, rt
, rs
, imm
);
14433 check_mips_64(ctx
);
14436 /* Treat as NOP. */
14439 op2
= MASK_DBSHFL(ctx
->opcode
);
14449 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
14452 gen_bitswap(ctx
, op2
, rd
, rt
);
14459 default: /* Invalid */
14460 MIPS_INVAL("special3_r6");
14461 gen_reserved_instruction(ctx
);
14466 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
14471 rs
= (ctx
->opcode
>> 21) & 0x1f;
14472 rt
= (ctx
->opcode
>> 16) & 0x1f;
14473 rd
= (ctx
->opcode
>> 11) & 0x1f;
14475 op1
= MASK_SPECIAL3(ctx
->opcode
);
14478 case OPC_DIVU_G_2E
:
14480 case OPC_MODU_G_2E
:
14481 case OPC_MULT_G_2E
:
14482 case OPC_MULTU_G_2E
:
14484 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
14485 * the same mask and op1.
14487 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
14488 op2
= MASK_ADDUH_QB(ctx
->opcode
);
14491 case OPC_ADDUH_R_QB
:
14493 case OPC_ADDQH_R_PH
:
14495 case OPC_ADDQH_R_W
:
14497 case OPC_SUBUH_R_QB
:
14499 case OPC_SUBQH_R_PH
:
14501 case OPC_SUBQH_R_W
:
14502 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14507 case OPC_MULQ_RS_W
:
14508 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14511 MIPS_INVAL("MASK ADDUH.QB");
14512 gen_reserved_instruction(ctx
);
14515 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
14516 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14518 gen_reserved_instruction(ctx
);
14522 op2
= MASK_LX(ctx
->opcode
);
14524 #if defined(TARGET_MIPS64)
14530 gen_mips_lx(ctx
, op2
, rd
, rs
, rt
);
14532 default: /* Invalid */
14533 MIPS_INVAL("MASK LX");
14534 gen_reserved_instruction(ctx
);
14538 case OPC_ABSQ_S_PH_DSP
:
14539 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
14541 case OPC_ABSQ_S_QB
:
14542 case OPC_ABSQ_S_PH
:
14544 case OPC_PRECEQ_W_PHL
:
14545 case OPC_PRECEQ_W_PHR
:
14546 case OPC_PRECEQU_PH_QBL
:
14547 case OPC_PRECEQU_PH_QBR
:
14548 case OPC_PRECEQU_PH_QBLA
:
14549 case OPC_PRECEQU_PH_QBRA
:
14550 case OPC_PRECEU_PH_QBL
:
14551 case OPC_PRECEU_PH_QBR
:
14552 case OPC_PRECEU_PH_QBLA
:
14553 case OPC_PRECEU_PH_QBRA
:
14554 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14561 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
14564 MIPS_INVAL("MASK ABSQ_S.PH");
14565 gen_reserved_instruction(ctx
);
14569 case OPC_ADDU_QB_DSP
:
14570 op2
= MASK_ADDU_QB(ctx
->opcode
);
14573 case OPC_ADDQ_S_PH
:
14576 case OPC_ADDU_S_QB
:
14578 case OPC_ADDU_S_PH
:
14580 case OPC_SUBQ_S_PH
:
14583 case OPC_SUBU_S_QB
:
14585 case OPC_SUBU_S_PH
:
14589 case OPC_RADDU_W_QB
:
14590 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14592 case OPC_MULEU_S_PH_QBL
:
14593 case OPC_MULEU_S_PH_QBR
:
14594 case OPC_MULQ_RS_PH
:
14595 case OPC_MULEQ_S_W_PHL
:
14596 case OPC_MULEQ_S_W_PHR
:
14597 case OPC_MULQ_S_PH
:
14598 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14600 default: /* Invalid */
14601 MIPS_INVAL("MASK ADDU.QB");
14602 gen_reserved_instruction(ctx
);
14607 case OPC_CMPU_EQ_QB_DSP
:
14608 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
14610 case OPC_PRECR_SRA_PH_W
:
14611 case OPC_PRECR_SRA_R_PH_W
:
14612 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
14614 case OPC_PRECR_QB_PH
:
14615 case OPC_PRECRQ_QB_PH
:
14616 case OPC_PRECRQ_PH_W
:
14617 case OPC_PRECRQ_RS_PH_W
:
14618 case OPC_PRECRQU_S_QB_PH
:
14619 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14621 case OPC_CMPU_EQ_QB
:
14622 case OPC_CMPU_LT_QB
:
14623 case OPC_CMPU_LE_QB
:
14624 case OPC_CMP_EQ_PH
:
14625 case OPC_CMP_LT_PH
:
14626 case OPC_CMP_LE_PH
:
14627 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14629 case OPC_CMPGU_EQ_QB
:
14630 case OPC_CMPGU_LT_QB
:
14631 case OPC_CMPGU_LE_QB
:
14632 case OPC_CMPGDU_EQ_QB
:
14633 case OPC_CMPGDU_LT_QB
:
14634 case OPC_CMPGDU_LE_QB
:
14637 case OPC_PACKRL_PH
:
14638 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14640 default: /* Invalid */
14641 MIPS_INVAL("MASK CMPU.EQ.QB");
14642 gen_reserved_instruction(ctx
);
14646 case OPC_SHLL_QB_DSP
:
14647 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
14649 case OPC_DPA_W_PH_DSP
:
14650 op2
= MASK_DPA_W_PH(ctx
->opcode
);
14652 case OPC_DPAU_H_QBL
:
14653 case OPC_DPAU_H_QBR
:
14654 case OPC_DPSU_H_QBL
:
14655 case OPC_DPSU_H_QBR
:
14657 case OPC_DPAX_W_PH
:
14658 case OPC_DPAQ_S_W_PH
:
14659 case OPC_DPAQX_S_W_PH
:
14660 case OPC_DPAQX_SA_W_PH
:
14662 case OPC_DPSX_W_PH
:
14663 case OPC_DPSQ_S_W_PH
:
14664 case OPC_DPSQX_S_W_PH
:
14665 case OPC_DPSQX_SA_W_PH
:
14666 case OPC_MULSAQ_S_W_PH
:
14667 case OPC_DPAQ_SA_L_W
:
14668 case OPC_DPSQ_SA_L_W
:
14669 case OPC_MAQ_S_W_PHL
:
14670 case OPC_MAQ_S_W_PHR
:
14671 case OPC_MAQ_SA_W_PHL
:
14672 case OPC_MAQ_SA_W_PHR
:
14673 case OPC_MULSA_W_PH
:
14674 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14676 default: /* Invalid */
14677 MIPS_INVAL("MASK DPAW.PH");
14678 gen_reserved_instruction(ctx
);
14683 op2
= MASK_INSV(ctx
->opcode
);
14694 t0
= tcg_temp_new();
14695 t1
= tcg_temp_new();
14697 gen_load_gpr(t0
, rt
);
14698 gen_load_gpr(t1
, rs
);
14700 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
14706 default: /* Invalid */
14707 MIPS_INVAL("MASK INSV");
14708 gen_reserved_instruction(ctx
);
14712 case OPC_APPEND_DSP
:
14713 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
14715 case OPC_EXTR_W_DSP
:
14716 op2
= MASK_EXTR_W(ctx
->opcode
);
14720 case OPC_EXTR_RS_W
:
14722 case OPC_EXTRV_S_H
:
14724 case OPC_EXTRV_R_W
:
14725 case OPC_EXTRV_RS_W
:
14730 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
14733 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14739 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14741 default: /* Invalid */
14742 MIPS_INVAL("MASK EXTR.W");
14743 gen_reserved_instruction(ctx
);
14747 #if defined(TARGET_MIPS64)
14748 case OPC_DDIV_G_2E
:
14749 case OPC_DDIVU_G_2E
:
14750 case OPC_DMULT_G_2E
:
14751 case OPC_DMULTU_G_2E
:
14752 case OPC_DMOD_G_2E
:
14753 case OPC_DMODU_G_2E
:
14754 check_insn(ctx
, INSN_LOONGSON2E
);
14755 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
14757 case OPC_ABSQ_S_QH_DSP
:
14758 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
14760 case OPC_PRECEQ_L_PWL
:
14761 case OPC_PRECEQ_L_PWR
:
14762 case OPC_PRECEQ_PW_QHL
:
14763 case OPC_PRECEQ_PW_QHR
:
14764 case OPC_PRECEQ_PW_QHLA
:
14765 case OPC_PRECEQ_PW_QHRA
:
14766 case OPC_PRECEQU_QH_OBL
:
14767 case OPC_PRECEQU_QH_OBR
:
14768 case OPC_PRECEQU_QH_OBLA
:
14769 case OPC_PRECEQU_QH_OBRA
:
14770 case OPC_PRECEU_QH_OBL
:
14771 case OPC_PRECEU_QH_OBR
:
14772 case OPC_PRECEU_QH_OBLA
:
14773 case OPC_PRECEU_QH_OBRA
:
14774 case OPC_ABSQ_S_OB
:
14775 case OPC_ABSQ_S_PW
:
14776 case OPC_ABSQ_S_QH
:
14777 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14785 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
14787 default: /* Invalid */
14788 MIPS_INVAL("MASK ABSQ_S.QH");
14789 gen_reserved_instruction(ctx
);
14793 case OPC_ADDU_OB_DSP
:
14794 op2
= MASK_ADDU_OB(ctx
->opcode
);
14796 case OPC_RADDU_L_OB
:
14798 case OPC_SUBQ_S_PW
:
14800 case OPC_SUBQ_S_QH
:
14802 case OPC_SUBU_S_OB
:
14804 case OPC_SUBU_S_QH
:
14806 case OPC_SUBUH_R_OB
:
14808 case OPC_ADDQ_S_PW
:
14810 case OPC_ADDQ_S_QH
:
14812 case OPC_ADDU_S_OB
:
14814 case OPC_ADDU_S_QH
:
14816 case OPC_ADDUH_R_OB
:
14817 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14819 case OPC_MULEQ_S_PW_QHL
:
14820 case OPC_MULEQ_S_PW_QHR
:
14821 case OPC_MULEU_S_QH_OBL
:
14822 case OPC_MULEU_S_QH_OBR
:
14823 case OPC_MULQ_RS_QH
:
14824 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14826 default: /* Invalid */
14827 MIPS_INVAL("MASK ADDU.OB");
14828 gen_reserved_instruction(ctx
);
14832 case OPC_CMPU_EQ_OB_DSP
:
14833 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
14835 case OPC_PRECR_SRA_QH_PW
:
14836 case OPC_PRECR_SRA_R_QH_PW
:
14837 /* Return value is rt. */
14838 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
14840 case OPC_PRECR_OB_QH
:
14841 case OPC_PRECRQ_OB_QH
:
14842 case OPC_PRECRQ_PW_L
:
14843 case OPC_PRECRQ_QH_PW
:
14844 case OPC_PRECRQ_RS_QH_PW
:
14845 case OPC_PRECRQU_S_OB_QH
:
14846 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
14848 case OPC_CMPU_EQ_OB
:
14849 case OPC_CMPU_LT_OB
:
14850 case OPC_CMPU_LE_OB
:
14851 case OPC_CMP_EQ_QH
:
14852 case OPC_CMP_LT_QH
:
14853 case OPC_CMP_LE_QH
:
14854 case OPC_CMP_EQ_PW
:
14855 case OPC_CMP_LT_PW
:
14856 case OPC_CMP_LE_PW
:
14857 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14859 case OPC_CMPGDU_EQ_OB
:
14860 case OPC_CMPGDU_LT_OB
:
14861 case OPC_CMPGDU_LE_OB
:
14862 case OPC_CMPGU_EQ_OB
:
14863 case OPC_CMPGU_LT_OB
:
14864 case OPC_CMPGU_LE_OB
:
14865 case OPC_PACKRL_PW
:
14869 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
14871 default: /* Invalid */
14872 MIPS_INVAL("MASK CMPU_EQ.OB");
14873 gen_reserved_instruction(ctx
);
14877 case OPC_DAPPEND_DSP
:
14878 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
14880 case OPC_DEXTR_W_DSP
:
14881 op2
= MASK_DEXTR_W(ctx
->opcode
);
14888 case OPC_DEXTR_R_L
:
14889 case OPC_DEXTR_RS_L
:
14891 case OPC_DEXTR_R_W
:
14892 case OPC_DEXTR_RS_W
:
14893 case OPC_DEXTR_S_H
:
14895 case OPC_DEXTRV_R_L
:
14896 case OPC_DEXTRV_RS_L
:
14897 case OPC_DEXTRV_S_H
:
14899 case OPC_DEXTRV_R_W
:
14900 case OPC_DEXTRV_RS_W
:
14901 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
14906 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14908 default: /* Invalid */
14909 MIPS_INVAL("MASK EXTR.W");
14910 gen_reserved_instruction(ctx
);
14914 case OPC_DPAQ_W_QH_DSP
:
14915 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
14917 case OPC_DPAU_H_OBL
:
14918 case OPC_DPAU_H_OBR
:
14919 case OPC_DPSU_H_OBL
:
14920 case OPC_DPSU_H_OBR
:
14922 case OPC_DPAQ_S_W_QH
:
14924 case OPC_DPSQ_S_W_QH
:
14925 case OPC_MULSAQ_S_W_QH
:
14926 case OPC_DPAQ_SA_L_PW
:
14927 case OPC_DPSQ_SA_L_PW
:
14928 case OPC_MULSAQ_S_L_PW
:
14929 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14931 case OPC_MAQ_S_W_QHLL
:
14932 case OPC_MAQ_S_W_QHLR
:
14933 case OPC_MAQ_S_W_QHRL
:
14934 case OPC_MAQ_S_W_QHRR
:
14935 case OPC_MAQ_SA_W_QHLL
:
14936 case OPC_MAQ_SA_W_QHLR
:
14937 case OPC_MAQ_SA_W_QHRL
:
14938 case OPC_MAQ_SA_W_QHRR
:
14939 case OPC_MAQ_S_L_PWL
:
14940 case OPC_MAQ_S_L_PWR
:
14945 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
14947 default: /* Invalid */
14948 MIPS_INVAL("MASK DPAQ.W.QH");
14949 gen_reserved_instruction(ctx
);
14953 case OPC_DINSV_DSP
:
14954 op2
= MASK_INSV(ctx
->opcode
);
14966 t0
= tcg_temp_new();
14967 t1
= tcg_temp_new();
14969 gen_load_gpr(t0
, rt
);
14970 gen_load_gpr(t1
, rs
);
14972 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
14978 default: /* Invalid */
14979 MIPS_INVAL("MASK DINSV");
14980 gen_reserved_instruction(ctx
);
14984 case OPC_SHLL_OB_DSP
:
14985 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
14988 default: /* Invalid */
14989 MIPS_INVAL("special3_legacy");
14990 gen_reserved_instruction(ctx
);
14996 #if defined(TARGET_MIPS64)
14998 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
15000 uint32_t opc
= MASK_MMI(ctx
->opcode
);
15001 int rs
= extract32(ctx
->opcode
, 21, 5);
15002 int rt
= extract32(ctx
->opcode
, 16, 5);
15003 int rd
= extract32(ctx
->opcode
, 11, 5);
15006 case MMI_OPC_MULT1
:
15007 case MMI_OPC_MULTU1
:
15009 case MMI_OPC_MADDU
:
15010 case MMI_OPC_MADD1
:
15011 case MMI_OPC_MADDU1
:
15012 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
15015 case MMI_OPC_DIVU1
:
15016 gen_div1_tx79(ctx
, opc
, rs
, rt
);
15019 MIPS_INVAL("TX79 MMI class");
15020 gen_reserved_instruction(ctx
);
15025 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
15027 gen_reserved_instruction(ctx
); /* TODO: MMI_OPC_SQ */
15031 * The TX79-specific instruction Store Quadword
15033 * +--------+-------+-------+------------------------+
15034 * | 011111 | base | rt | offset | SQ
15035 * +--------+-------+-------+------------------------+
15038 * has the same opcode as the Read Hardware Register instruction
15040 * +--------+-------+-------+-------+-------+--------+
15041 * | 011111 | 00000 | rt | rd | 00000 | 111011 | RDHWR
15042 * +--------+-------+-------+-------+-------+--------+
15045 * that is required, trapped and emulated by the Linux kernel. However, all
15046 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
15047 * offset is odd. Therefore all valid SQ instructions can execute normally.
15048 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
15049 * between SQ and RDHWR, as the Linux kernel does.
15051 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
15053 int base
= extract32(ctx
->opcode
, 21, 5);
15054 int rt
= extract32(ctx
->opcode
, 16, 5);
15055 int offset
= extract32(ctx
->opcode
, 0, 16);
15057 #ifdef CONFIG_USER_ONLY
15058 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
15059 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
15061 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
15062 int rd
= extract32(ctx
->opcode
, 11, 5);
15064 gen_rdhwr(ctx
, rt
, rd
, 0);
15069 gen_mmi_sq(ctx
, base
, rt
, offset
);
15074 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
15076 int rs
, rt
, rd
, sa
;
15080 rs
= (ctx
->opcode
>> 21) & 0x1f;
15081 rt
= (ctx
->opcode
>> 16) & 0x1f;
15082 rd
= (ctx
->opcode
>> 11) & 0x1f;
15083 sa
= (ctx
->opcode
>> 6) & 0x1f;
15084 imm
= sextract32(ctx
->opcode
, 7, 9);
15086 op1
= MASK_SPECIAL3(ctx
->opcode
);
15089 * EVA loads and stores overlap Loongson 2E instructions decoded by
15090 * decode_opc_special3_legacy(), so be careful to allow their decoding when
15103 check_cp0_enabled(ctx
);
15104 gen_ld(ctx
, op1
, rt
, rs
, imm
);
15111 check_cp0_enabled(ctx
);
15112 gen_st(ctx
, op1
, rt
, rs
, imm
);
15115 check_cp0_enabled(ctx
);
15116 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, true);
15120 check_cp0_enabled(ctx
);
15121 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15122 gen_cache_operation(ctx
, rt
, rs
, imm
);
15126 check_cp0_enabled(ctx
);
15127 /* Treat as NOP. */
15135 check_insn(ctx
, ISA_MIPS_R2
);
15136 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
15139 op2
= MASK_BSHFL(ctx
->opcode
);
15146 check_insn(ctx
, ISA_MIPS_R6
);
15147 decode_opc_special3_r6(env
, ctx
);
15150 check_insn(ctx
, ISA_MIPS_R2
);
15151 gen_bshfl(ctx
, op2
, rt
, rd
);
15155 #if defined(TARGET_MIPS64)
15162 check_insn(ctx
, ISA_MIPS_R2
);
15163 check_mips_64(ctx
);
15164 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
15167 op2
= MASK_DBSHFL(ctx
->opcode
);
15178 check_insn(ctx
, ISA_MIPS_R6
);
15179 decode_opc_special3_r6(env
, ctx
);
15182 check_insn(ctx
, ISA_MIPS_R2
);
15183 check_mips_64(ctx
);
15184 op2
= MASK_DBSHFL(ctx
->opcode
);
15185 gen_bshfl(ctx
, op2
, rt
, rd
);
15191 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
15196 TCGv t0
= tcg_temp_new();
15197 TCGv t1
= tcg_temp_new();
15199 gen_load_gpr(t0
, rt
);
15200 gen_load_gpr(t1
, rs
);
15201 gen_helper_fork(t0
, t1
);
15209 TCGv t0
= tcg_temp_new();
15211 gen_load_gpr(t0
, rs
);
15212 gen_helper_yield(t0
, cpu_env
, t0
);
15213 gen_store_gpr(t0
, rd
);
15218 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15219 decode_opc_special3_r6(env
, ctx
);
15221 decode_opc_special3_legacy(env
, ctx
);
15226 static bool decode_opc_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
15229 int rs
, rt
, rd
, sa
;
15233 op
= MASK_OP_MAJOR(ctx
->opcode
);
15234 rs
= (ctx
->opcode
>> 21) & 0x1f;
15235 rt
= (ctx
->opcode
>> 16) & 0x1f;
15236 rd
= (ctx
->opcode
>> 11) & 0x1f;
15237 sa
= (ctx
->opcode
>> 6) & 0x1f;
15238 imm
= (int16_t)ctx
->opcode
;
15241 decode_opc_special(env
, ctx
);
15244 #if defined(TARGET_MIPS64)
15245 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
15246 decode_mmi(env
, ctx
);
15250 if (TARGET_LONG_BITS
== 32 && (ctx
->insn_flags
& ASE_MXU
)) {
15251 if (MASK_SPECIAL2(ctx
->opcode
) == OPC_MUL
) {
15252 gen_arith(ctx
, OPC_MUL
, rd
, rs
, rt
);
15254 decode_ase_mxu(ctx
, ctx
->opcode
);
15258 decode_opc_special2_legacy(env
, ctx
);
15261 #if defined(TARGET_MIPS64)
15262 if (ctx
->insn_flags
& INSN_R5900
) {
15263 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
15265 decode_opc_special3(env
, ctx
);
15268 decode_opc_special3(env
, ctx
);
15272 op1
= MASK_REGIMM(ctx
->opcode
);
15274 case OPC_BLTZL
: /* REGIMM branches */
15278 check_insn(ctx
, ISA_MIPS2
);
15279 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15283 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
15287 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15289 /* OPC_NAL, OPC_BAL */
15290 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
15292 gen_reserved_instruction(ctx
);
15295 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
15298 case OPC_TGEI
: /* REGIMM traps */
15304 check_insn(ctx
, ISA_MIPS2
);
15305 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15306 gen_trap(ctx
, op1
, rs
, -1, imm
, 0);
15309 check_insn(ctx
, ISA_MIPS_R6
);
15310 gen_reserved_instruction(ctx
);
15313 check_insn(ctx
, ISA_MIPS_R2
);
15315 * Break the TB to be able to sync copied instructions
15318 ctx
->base
.is_jmp
= DISAS_STOP
;
15320 case OPC_BPOSGE32
: /* MIPS DSP branch */
15321 #if defined(TARGET_MIPS64)
15325 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
15327 #if defined(TARGET_MIPS64)
15329 check_insn(ctx
, ISA_MIPS_R6
);
15330 check_mips_64(ctx
);
15332 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
15336 check_insn(ctx
, ISA_MIPS_R6
);
15337 check_mips_64(ctx
);
15339 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
15343 default: /* Invalid */
15344 MIPS_INVAL("regimm");
15345 gen_reserved_instruction(ctx
);
15350 check_cp0_enabled(ctx
);
15351 op1
= MASK_CP0(ctx
->opcode
);
15359 #if defined(TARGET_MIPS64)
15363 #ifndef CONFIG_USER_ONLY
15364 gen_cp0(env
, ctx
, op1
, rt
, rd
);
15365 #endif /* !CONFIG_USER_ONLY */
15383 #ifndef CONFIG_USER_ONLY
15384 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
15385 #endif /* !CONFIG_USER_ONLY */
15388 #ifndef CONFIG_USER_ONLY
15391 TCGv t0
= tcg_temp_new();
15393 op2
= MASK_MFMC0(ctx
->opcode
);
15397 gen_helper_dmt(t0
);
15398 gen_store_gpr(t0
, rt
);
15402 gen_helper_emt(t0
);
15403 gen_store_gpr(t0
, rt
);
15407 gen_helper_dvpe(t0
, cpu_env
);
15408 gen_store_gpr(t0
, rt
);
15412 gen_helper_evpe(t0
, cpu_env
);
15413 gen_store_gpr(t0
, rt
);
15416 check_insn(ctx
, ISA_MIPS_R6
);
15418 gen_helper_dvp(t0
, cpu_env
);
15419 gen_store_gpr(t0
, rt
);
15423 check_insn(ctx
, ISA_MIPS_R6
);
15425 gen_helper_evp(t0
, cpu_env
);
15426 gen_store_gpr(t0
, rt
);
15430 check_insn(ctx
, ISA_MIPS_R2
);
15431 save_cpu_state(ctx
, 1);
15432 gen_helper_di(t0
, cpu_env
);
15433 gen_store_gpr(t0
, rt
);
15435 * Stop translation as we may have switched
15436 * the execution mode.
15438 ctx
->base
.is_jmp
= DISAS_STOP
;
15441 check_insn(ctx
, ISA_MIPS_R2
);
15442 save_cpu_state(ctx
, 1);
15443 gen_helper_ei(t0
, cpu_env
);
15444 gen_store_gpr(t0
, rt
);
15446 * DISAS_STOP isn't sufficient, we need to ensure we break
15447 * out of translated code to check for pending interrupts.
15449 gen_save_pc(ctx
->base
.pc_next
+ 4);
15450 ctx
->base
.is_jmp
= DISAS_EXIT
;
15452 default: /* Invalid */
15453 MIPS_INVAL("mfmc0");
15454 gen_reserved_instruction(ctx
);
15459 #endif /* !CONFIG_USER_ONLY */
15462 check_insn(ctx
, ISA_MIPS_R2
);
15463 gen_load_srsgpr(rt
, rd
);
15466 check_insn(ctx
, ISA_MIPS_R2
);
15467 gen_store_srsgpr(rt
, rd
);
15471 gen_reserved_instruction(ctx
);
15475 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
15476 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15477 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
15478 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15481 /* Arithmetic with immediate opcode */
15482 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15486 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15488 case OPC_SLTI
: /* Set on less than with immediate opcode */
15490 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
15492 case OPC_ANDI
: /* Arithmetic with immediate opcode */
15493 case OPC_LUI
: /* OPC_AUI */
15496 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
15498 case OPC_J
: /* Jump */
15500 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15501 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
15504 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
15505 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15507 gen_reserved_instruction(ctx
);
15510 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
15511 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15514 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15517 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
15518 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15520 gen_reserved_instruction(ctx
);
15523 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
15524 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15527 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15530 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
15533 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15535 check_insn(ctx
, ISA_MIPS_R6
);
15536 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
15537 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15540 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
15543 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15545 check_insn(ctx
, ISA_MIPS_R6
);
15546 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
15547 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15552 check_insn(ctx
, ISA_MIPS2
);
15553 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15557 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
15559 case OPC_LL
: /* Load and stores */
15560 check_insn(ctx
, ISA_MIPS2
);
15561 if (ctx
->insn_flags
& INSN_R5900
) {
15562 check_insn_opc_user_only(ctx
, INSN_R5900
);
15573 gen_ld(ctx
, op
, rt
, rs
, imm
);
15580 gen_st(ctx
, op
, rt
, rs
, imm
);
15583 check_insn(ctx
, ISA_MIPS2
);
15584 if (ctx
->insn_flags
& INSN_R5900
) {
15585 check_insn_opc_user_only(ctx
, INSN_R5900
);
15587 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
15590 check_cp0_enabled(ctx
);
15591 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS_R1
);
15592 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
15593 gen_cache_operation(ctx
, rt
, rs
, imm
);
15595 /* Treat as NOP. */
15598 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
| INSN_R5900
);
15599 /* Treat as NOP. */
15602 /* Floating point (COP1). */
15607 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
15611 op1
= MASK_CP1(ctx
->opcode
);
15616 check_cp1_enabled(ctx
);
15617 check_insn(ctx
, ISA_MIPS_R2
);
15623 check_cp1_enabled(ctx
);
15624 gen_cp1(ctx
, op1
, rt
, rd
);
15626 #if defined(TARGET_MIPS64)
15629 check_cp1_enabled(ctx
);
15630 check_insn(ctx
, ISA_MIPS3
);
15631 check_mips_64(ctx
);
15632 gen_cp1(ctx
, op1
, rt
, rd
);
15635 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
15636 check_cp1_enabled(ctx
);
15637 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15639 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
15644 check_insn(ctx
, ASE_MIPS3D
);
15645 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
15646 (rt
>> 2) & 0x7, imm
<< 2);
15650 check_cp1_enabled(ctx
);
15651 check_insn(ctx
, ISA_MIPS_R6
);
15652 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
15656 check_cp1_enabled(ctx
);
15657 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15659 check_insn(ctx
, ASE_MIPS3D
);
15662 check_cp1_enabled(ctx
);
15663 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15664 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
15665 (rt
>> 2) & 0x7, imm
<< 2);
15672 check_cp1_enabled(ctx
);
15673 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
15679 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
15680 check_cp1_enabled(ctx
);
15681 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15683 case R6_OPC_CMP_AF_S
:
15684 case R6_OPC_CMP_UN_S
:
15685 case R6_OPC_CMP_EQ_S
:
15686 case R6_OPC_CMP_UEQ_S
:
15687 case R6_OPC_CMP_LT_S
:
15688 case R6_OPC_CMP_ULT_S
:
15689 case R6_OPC_CMP_LE_S
:
15690 case R6_OPC_CMP_ULE_S
:
15691 case R6_OPC_CMP_SAF_S
:
15692 case R6_OPC_CMP_SUN_S
:
15693 case R6_OPC_CMP_SEQ_S
:
15694 case R6_OPC_CMP_SEUQ_S
:
15695 case R6_OPC_CMP_SLT_S
:
15696 case R6_OPC_CMP_SULT_S
:
15697 case R6_OPC_CMP_SLE_S
:
15698 case R6_OPC_CMP_SULE_S
:
15699 case R6_OPC_CMP_OR_S
:
15700 case R6_OPC_CMP_UNE_S
:
15701 case R6_OPC_CMP_NE_S
:
15702 case R6_OPC_CMP_SOR_S
:
15703 case R6_OPC_CMP_SUNE_S
:
15704 case R6_OPC_CMP_SNE_S
:
15705 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
15707 case R6_OPC_CMP_AF_D
:
15708 case R6_OPC_CMP_UN_D
:
15709 case R6_OPC_CMP_EQ_D
:
15710 case R6_OPC_CMP_UEQ_D
:
15711 case R6_OPC_CMP_LT_D
:
15712 case R6_OPC_CMP_ULT_D
:
15713 case R6_OPC_CMP_LE_D
:
15714 case R6_OPC_CMP_ULE_D
:
15715 case R6_OPC_CMP_SAF_D
:
15716 case R6_OPC_CMP_SUN_D
:
15717 case R6_OPC_CMP_SEQ_D
:
15718 case R6_OPC_CMP_SEUQ_D
:
15719 case R6_OPC_CMP_SLT_D
:
15720 case R6_OPC_CMP_SULT_D
:
15721 case R6_OPC_CMP_SLE_D
:
15722 case R6_OPC_CMP_SULE_D
:
15723 case R6_OPC_CMP_OR_D
:
15724 case R6_OPC_CMP_UNE_D
:
15725 case R6_OPC_CMP_NE_D
:
15726 case R6_OPC_CMP_SOR_D
:
15727 case R6_OPC_CMP_SUNE_D
:
15728 case R6_OPC_CMP_SNE_D
:
15729 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
15732 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
15733 rt
, rd
, sa
, (imm
>> 8) & 0x7);
15738 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
15745 gen_reserved_instruction(ctx
);
15750 /* Compact branches [R6] and COP2 [non-R6] */
15751 case OPC_BC
: /* OPC_LWC2 */
15752 case OPC_BALC
: /* OPC_SWC2 */
15753 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15754 /* OPC_BC, OPC_BALC */
15755 gen_compute_compact_branch(ctx
, op
, 0, 0,
15756 sextract32(ctx
->opcode
<< 2, 0, 28));
15757 } else if (ctx
->insn_flags
& ASE_LEXT
) {
15758 gen_loongson_lswc2(ctx
, rt
, rs
, rd
);
15760 /* OPC_LWC2, OPC_SWC2 */
15761 /* COP2: Not implemented. */
15762 generate_exception_err(ctx
, EXCP_CpU
, 2);
15765 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
15766 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
15767 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15769 /* OPC_BEQZC, OPC_BNEZC */
15770 gen_compute_compact_branch(ctx
, op
, rs
, 0,
15771 sextract32(ctx
->opcode
<< 2, 0, 23));
15773 /* OPC_JIC, OPC_JIALC */
15774 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
15776 } else if (ctx
->insn_flags
& ASE_LEXT
) {
15777 gen_loongson_lsdc2(ctx
, rt
, rs
, rd
);
15779 /* OPC_LWC2, OPC_SWC2 */
15780 /* COP2: Not implemented. */
15781 generate_exception_err(ctx
, EXCP_CpU
, 2);
15785 check_insn(ctx
, ASE_LMMI
);
15786 /* Note that these instructions use different fields. */
15787 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
15791 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
15792 check_cp1_enabled(ctx
);
15793 op1
= MASK_CP3(ctx
->opcode
);
15797 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS_R2
);
15803 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15804 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
15807 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15808 /* Treat as NOP. */
15811 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS_R2
);
15825 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
15826 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
15830 gen_reserved_instruction(ctx
);
15834 generate_exception_err(ctx
, EXCP_CpU
, 1);
15838 #if defined(TARGET_MIPS64)
15839 /* MIPS64 opcodes */
15841 if (ctx
->insn_flags
& INSN_R5900
) {
15842 check_insn_opc_user_only(ctx
, INSN_R5900
);
15849 check_insn(ctx
, ISA_MIPS3
);
15850 check_mips_64(ctx
);
15851 gen_ld(ctx
, op
, rt
, rs
, imm
);
15856 check_insn(ctx
, ISA_MIPS3
);
15857 check_mips_64(ctx
);
15858 gen_st(ctx
, op
, rt
, rs
, imm
);
15861 check_insn(ctx
, ISA_MIPS3
);
15862 if (ctx
->insn_flags
& INSN_R5900
) {
15863 check_insn_opc_user_only(ctx
, INSN_R5900
);
15865 check_mips_64(ctx
);
15866 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEUQ
, false);
15868 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
15869 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15870 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
15871 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15874 check_insn(ctx
, ISA_MIPS3
);
15875 check_mips_64(ctx
);
15876 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15880 check_insn(ctx
, ISA_MIPS3
);
15881 check_mips_64(ctx
);
15882 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
15885 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
15886 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15887 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
15889 MIPS_INVAL("major opcode");
15890 gen_reserved_instruction(ctx
);
15894 case OPC_DAUI
: /* OPC_JALX */
15895 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15896 #if defined(TARGET_MIPS64)
15898 check_mips_64(ctx
);
15900 generate_exception(ctx
, EXCP_RI
);
15901 } else if (rt
!= 0) {
15902 TCGv t0
= tcg_temp_new();
15903 gen_load_gpr(t0
, rs
);
15904 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
15908 gen_reserved_instruction(ctx
);
15909 MIPS_INVAL("major opcode");
15913 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
15914 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
15915 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
15919 /* MDMX: Not implemented. */
15922 check_insn(ctx
, ISA_MIPS_R6
);
15923 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
15925 default: /* Invalid */
15926 MIPS_INVAL("major opcode");
15932 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
15934 /* make sure instructions are on a word boundary */
15935 if (ctx
->base
.pc_next
& 0x3) {
15936 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
15937 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
15941 /* Handle blikely not taken case */
15942 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
15943 TCGLabel
*l1
= gen_new_label();
15945 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
15946 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
15947 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
15951 /* Transition to the auto-generated decoder. */
15953 /* Vendor specific extensions */
15954 if (cpu_supports_isa(env
, INSN_R5900
) && decode_ext_txx9(ctx
, ctx
->opcode
)) {
15957 if (cpu_supports_isa(env
, INSN_VR54XX
) && decode_ext_vr54xx(ctx
, ctx
->opcode
)) {
15960 #if defined(TARGET_MIPS64)
15961 if (cpu_supports_isa(env
, INSN_OCTEON
) && decode_ext_octeon(ctx
, ctx
->opcode
)) {
15966 /* ISA extensions */
15967 if (ase_msa_available(env
) && decode_ase_msa(ctx
, ctx
->opcode
)) {
15971 /* ISA (from latest to oldest) */
15972 if (cpu_supports_isa(env
, ISA_MIPS_R6
) && decode_isa_rel6(ctx
, ctx
->opcode
)) {
15976 if (decode_opc_legacy(env
, ctx
)) {
15980 gen_reserved_instruction(ctx
);
15983 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
15985 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
15986 CPUMIPSState
*env
= cs
->env_ptr
;
15988 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
15989 ctx
->saved_pc
= -1;
15990 ctx
->insn_flags
= env
->insn_flags
;
15991 ctx
->CP0_Config0
= env
->CP0_Config0
;
15992 ctx
->CP0_Config1
= env
->CP0_Config1
;
15993 ctx
->CP0_Config2
= env
->CP0_Config2
;
15994 ctx
->CP0_Config3
= env
->CP0_Config3
;
15995 ctx
->CP0_Config5
= env
->CP0_Config5
;
15997 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
15998 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
15999 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
16000 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
16001 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
16002 ctx
->PAMask
= env
->PAMask
;
16003 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
16004 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
16005 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
16006 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
16007 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
16008 /* Restore delay slot state from the tb context. */
16009 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
16010 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
16011 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
16012 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
16013 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
16014 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
16015 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
16016 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
16017 ctx
->mi
= (env
->CP0_Config5
>> CP0C5_MI
) & 1;
16018 ctx
->gi
= (env
->CP0_Config5
>> CP0C5_GI
) & 3;
16019 restore_cpu_state(env
, ctx
);
16020 #ifdef CONFIG_USER_ONLY
16021 ctx
->mem_idx
= MIPS_HFLAG_UM
;
16023 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
16025 ctx
->default_tcg_memop_mask
= (!(ctx
->insn_flags
& ISA_NANOMIPS32
) &&
16026 (ctx
->insn_flags
& (ISA_MIPS_R6
|
16027 INSN_LOONGSON3A
))) ? MO_UNALN
: MO_ALIGN
;
16030 * Execute a branch and its delay slot as a single instruction.
16031 * This is what GDB expects and is consistent with what the
16032 * hardware does (e.g. if a delay slot instruction faults, the
16033 * reported PC is the PC of the branch).
16035 if (ctx
->base
.singlestep_enabled
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
16036 ctx
->base
.max_insns
= 2;
16039 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
16043 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
16047 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
16049 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16051 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
16055 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
16057 CPUMIPSState
*env
= cs
->env_ptr
;
16058 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16062 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
16063 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
16064 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16065 insn_bytes
= decode_isa_nanomips(env
, ctx
);
16066 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
16067 ctx
->opcode
= translator_ldl(env
, &ctx
->base
, ctx
->base
.pc_next
);
16069 decode_opc(env
, ctx
);
16070 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
16071 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16072 insn_bytes
= decode_isa_micromips(env
, ctx
);
16073 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
16074 ctx
->opcode
= translator_lduw(env
, &ctx
->base
, ctx
->base
.pc_next
);
16075 insn_bytes
= decode_ase_mips16e(env
, ctx
);
16077 gen_reserved_instruction(ctx
);
16078 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
16082 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
16083 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
16084 MIPS_HFLAG_FBNSLOT
))) {
16086 * Force to generate branch as there is neither delay nor
16091 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
16092 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
16094 * Force to generate branch as microMIPS R6 doesn't restrict
16095 * branches in the forbidden slot.
16101 gen_branch(ctx
, insn_bytes
);
16103 if (ctx
->base
.is_jmp
== DISAS_SEMIHOST
) {
16104 generate_exception_err(ctx
, EXCP_SEMIHOST
, insn_bytes
);
16106 ctx
->base
.pc_next
+= insn_bytes
;
16108 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
16113 * End the TB on (most) page crossings.
16114 * See mips_tr_init_disas_context about single-stepping a branch
16115 * together with its delay slot.
16117 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
16118 && !ctx
->base
.singlestep_enabled
) {
16119 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
16123 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
16125 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
16127 switch (ctx
->base
.is_jmp
) {
16129 gen_save_pc(ctx
->base
.pc_next
);
16130 tcg_gen_lookup_and_goto_ptr();
16133 case DISAS_TOO_MANY
:
16134 save_cpu_state(ctx
, 0);
16135 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
16138 tcg_gen_exit_tb(NULL
, 0);
16140 case DISAS_NORETURN
:
16143 g_assert_not_reached();
16147 static void mips_tr_disas_log(const DisasContextBase
*dcbase
,
16148 CPUState
*cs
, FILE *logfile
)
16150 fprintf(logfile
, "IN: %s\n", lookup_symbol(dcbase
->pc_first
));
16151 target_disas(logfile
, cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
16154 static const TranslatorOps mips_tr_ops
= {
16155 .init_disas_context
= mips_tr_init_disas_context
,
16156 .tb_start
= mips_tr_tb_start
,
16157 .insn_start
= mips_tr_insn_start
,
16158 .translate_insn
= mips_tr_translate_insn
,
16159 .tb_stop
= mips_tr_tb_stop
,
16160 .disas_log
= mips_tr_disas_log
,
16163 void gen_intermediate_code(CPUState
*cs
, TranslationBlock
*tb
, int max_insns
,
16164 target_ulong pc
, void *host_pc
)
16168 translator_loop(cs
, tb
, max_insns
, pc
, host_pc
, &mips_tr_ops
, &ctx
.base
);
16171 void mips_tcg_init(void)
16176 for (i
= 1; i
< 32; i
++)
16177 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
16178 offsetof(CPUMIPSState
,
16181 #if defined(TARGET_MIPS64)
16182 cpu_gpr_hi
[0] = NULL
;
16184 for (unsigned i
= 1; i
< 32; i
++) {
16185 g_autofree
char *rname
= g_strdup_printf("%s[hi]", regnames
[i
]);
16187 cpu_gpr_hi
[i
] = tcg_global_mem_new_i64(cpu_env
,
16188 offsetof(CPUMIPSState
,
16189 active_tc
.gpr_hi
[i
]),
16192 #endif /* !TARGET_MIPS64 */
16193 for (i
= 0; i
< 32; i
++) {
16194 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
16196 fpu_f64
[i
] = tcg_global_mem_new_i64(cpu_env
, off
, fregnames
[i
]);
16198 msa_translate_init();
16199 cpu_PC
= tcg_global_mem_new(cpu_env
,
16200 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
16201 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
16202 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
16203 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
16205 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
16206 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
16209 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
16210 offsetof(CPUMIPSState
,
16211 active_tc
.DSPControl
),
16213 bcond
= tcg_global_mem_new(cpu_env
,
16214 offsetof(CPUMIPSState
, bcond
), "bcond");
16215 btarget
= tcg_global_mem_new(cpu_env
,
16216 offsetof(CPUMIPSState
, btarget
), "btarget");
16217 hflags
= tcg_global_mem_new_i32(cpu_env
,
16218 offsetof(CPUMIPSState
, hflags
), "hflags");
16220 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
16221 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
16223 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
16224 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
16226 cpu_lladdr
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, lladdr
),
16228 cpu_llval
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, llval
),
16231 if (TARGET_LONG_BITS
== 32) {
16232 mxu_translate_init();
16236 void mips_restore_state_to_opc(CPUState
*cs
,
16237 const TranslationBlock
*tb
,
16238 const uint64_t *data
)
16240 MIPSCPU
*cpu
= MIPS_CPU(cs
);
16241 CPUMIPSState
*env
= &cpu
->env
;
16243 env
->active_tc
.PC
= data
[0];
16244 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
16245 env
->hflags
|= data
[1];
16246 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
16247 case MIPS_HFLAG_BR
:
16249 case MIPS_HFLAG_BC
:
16250 case MIPS_HFLAG_BL
:
16252 env
->btarget
= data
[2];